Added: Migrator
commit
926a7cd699
|
@ -0,0 +1,9 @@
|
|||
HOST=http://127.0.0.1:8001
|
||||
USER=0123
|
||||
PASS=Welcome@1
|
||||
FILE_ROOT="./26062024"
|
||||
GROUP_FILE="/DE1.DBF"
|
||||
PEN_FILE="/PA.DBF"
|
||||
SUBSCRIPTION_FILE="/DE2.DBF"
|
||||
USER_FILE="/DE2.DBF"
|
||||
TXN_DIR="/DTFILES"
|
|
@ -0,0 +1,6 @@
|
|||
[users.csv]
|
||||
ColNameHeader=True
|
||||
CharacterSet=ANSI
|
||||
Format=CSVDelimited
|
||||
DecimalSymbol=.
|
||||
MaxScanRows=64
|
|
@ -0,0 +1,22 @@
|
|||
import subprocess
import sys


def run_all_scripts():
    """Run every migration script, in dependency order, as a subprocess.

    BUG FIX: the original imported the migration modules and then evaluated
    the bare module names (e.g. a lone ``migrate_groups`` statement), which
    is a no-op — each script guards its work behind
    ``if __name__ == "__main__":``, so importing never migrated anything.
    Running each file with the current interpreter executes that guard.
    """
    scripts = [
        ("Groups", "migrate_groups.py"),
        ("Users", "migrate_users.py"),
        ("Subscriptions", "migrate_subscriptions.py"),
        ("Transactions", "migrate_transactions.py"),
    ]
    for label, script in scripts:
        print(f"Migrating {label}")
        # check=False: a failing script should not abort the remaining ones,
        # matching the original fire-and-forget behavior.
        subprocess.run([sys.executable, script], check=False)
        print(f"{label} migration completed.")


if __name__ == "__main__":
    run_all_scripts()
|
|
@ -0,0 +1,191 @@
|
|||
ACTP,MINBAL,RDATE,NOFI,PA_NA
|
||||
10,600,2003-10-30,65,10
|
||||
11,400,2004-01-31,65,10
|
||||
12,550,2004-09-22,65,10
|
||||
13,500,2005-03-03,65,10
|
||||
14,500,2005-08-31,66,10
|
||||
15,400,2005-09-22,66,10
|
||||
16,600,2005-12-14,66,10
|
||||
17,400,2006-01-10,66,10
|
||||
18,400,2006-04-26,66,10
|
||||
19,600,2006-08-21,66,10
|
||||
20,400,2006-10-12,66,10
|
||||
21,550,2007-01-13,66,10
|
||||
22,500,2007-04-09,66,10
|
||||
23,550,2007-06-02,66,10
|
||||
24,550,2007-08-16,66,10
|
||||
25,400,2007-12-22,66,10
|
||||
2A,300,2008-04-16,66,10
|
||||
2B,400,2008-09-03,66,10
|
||||
2C,300,2009-01-24,66,10
|
||||
2D,400,2009-05-07,66,10
|
||||
2E,300,2009-07-03,66,10
|
||||
2F,400,2009-07-18,66,10
|
||||
2G,300,2009-09-08,66,10
|
||||
2H,400,2009-09-08,66,10
|
||||
2I,300,2010-01-09,66,10
|
||||
2J,400,2010-02-05,66,10
|
||||
2K,300,2010-05-20,66,10
|
||||
2L,400,2010-07-19,66,10
|
||||
2M,400,2010-10-05,66,10
|
||||
2N,400,2010-11-16,66,10
|
||||
2O,300,2011-01-18,66,10
|
||||
2P,400,2011-02-11,66,10
|
||||
2Q,300,2011-03-16,66,10
|
||||
2R,400,2011-04-13,66,10
|
||||
2S,300,2011-06-16,66,10
|
||||
2T,300,2011-07-15,66,10
|
||||
2U,300,2011-08-24,66,10
|
||||
2V,400,2011-09-14,66,10
|
||||
2W,400,2011-10-14,66,10
|
||||
2X,400,2011-11-15,66,10
|
||||
2Y,300,2012-02-13,66,10
|
||||
2Z,400,2012-03-20,66,10
|
||||
4A,300,2014-06-18,66,10
|
||||
4B,400,2014-07-18,66,10
|
||||
4C,400,2014-08-12,66,10
|
||||
4D,400,2014-09-19,66,10
|
||||
4E,400,2014-10-11,66,10
|
||||
4F,400,2014-11-17,66,10
|
||||
4G,300,2014-12-19,66,10
|
||||
4H,400,2015-01-22,66,10
|
||||
4I,300,2015-02-14,66,10
|
||||
4J,400,2015-03-16,66,10
|
||||
4K,300,2015-04-17,66,10
|
||||
4L,300,2015-05-14,66,10
|
||||
4M,300,2015-06-23,66,10
|
||||
4N,300,2015-07-23,66,10
|
||||
4O,400,2015-08-12,66,10
|
||||
4P,300,2015-09-23,66,10
|
||||
4Q,400,2015-10-12,66,10
|
||||
4R,400,2015-11-25,66,10
|
||||
4S,400,2015-12-16,66,10
|
||||
4T,300,2016-01-18,66,10
|
||||
4U,300,2016-02-16,66,10
|
||||
4V,300,2016-03-19,66,10
|
||||
4W,400,2017-01-01,69,10
|
||||
5A,600,2015-02-24,66,20
|
||||
5B,1000,2015-03-14,66,30
|
||||
5C,600,2015-05-12,66,20
|
||||
5D,600,2015-06-11,66,20
|
||||
5E,600,2015-08-24,66,20
|
||||
5F,600,2015-09-16,66,20
|
||||
5G,1000,2015-11-16,66,30
|
||||
AA,400,2012-04-16,66,10
|
||||
BB,300,2012-05-16,66,10
|
||||
C0,300,2012-06-18,66,10
|
||||
CA,,,,
|
||||
DD,300,2012-07-17,66,10
|
||||
DH,,,,
|
||||
EE,400,2012-08-22,66,10
|
||||
FF,400,2012-09-17,66,10
|
||||
GG,400,2012-10-15,66,10
|
||||
HH,400,2012-11-23,66,10
|
||||
II,300,2012-12-24,66,10
|
||||
JJ,300,2013-01-12,66,10
|
||||
KK,400,2013-02-25,66,10
|
||||
L2,50,2002-08-05,63,10
|
||||
L3,50,2002-08-05,63,10
|
||||
L4,100,2008-01-05,66,10
|
||||
L5,100,2008-01-31,66,10
|
||||
L6,100,2013-10-11,66,2
|
||||
L7,100,2013-10-16,66,2
|
||||
L8,100,2014-01-18,66,2
|
||||
L9,100,2014-01-24,66,2
|
||||
LL,300,2013-03-16,66,10
|
||||
MM,400,2013-04-16,66,10
|
||||
NN,400,2013-05-22,66,10
|
||||
OO,300,2013-06-15,66,10
|
||||
PP,400,2013-07-25,66,10
|
||||
QQ,300,2013-08-17,66,10
|
||||
RR,300,2013-09-16,66,10
|
||||
SB,,,,
|
||||
SS,400,2013-10-14,66,10
|
||||
TT,300,2013-11-25,66,10
|
||||
UU,400,2013-12-24,66,10
|
||||
V4,350,2000-04-24,65,10
|
||||
V5,350,2000-04-25,65,10
|
||||
V6,350,2000-04-26,65,10
|
||||
V7,400,2000-07-19,65,10
|
||||
V8,350,2000-10-21,65,10
|
||||
V9,400,2001-10-05,65,10
|
||||
VV,400,2014-01-21,66,10
|
||||
WW,300,2014-02-22,66,10
|
||||
XX,400,2014-03-12,66,10
|
||||
YY,400,2014-04-17,66,10
|
||||
Z0,300,2014-05-23,66,10
|
||||
,,,,
|
||||
PEN,0,,,
|
||||
5H,600,2017-02-01,69,20
|
||||
4X,300,2017-03-15,69,10
|
||||
5I,1000,2017-04-14,69,30
|
||||
4Y,400,2017-05-16,69,10
|
||||
5J,600,2017-06-15,69,20
|
||||
4Z,400,2017-07-11,69,10
|
||||
6A,300,2017-08-12,69,10
|
||||
5K,600,2017-09-15,69,20
|
||||
5L,600,2017-10-13,69,20
|
||||
6B,400,2017-10-17,69,10
|
||||
6C,300,2017-11-14,69,10
|
||||
5M,1000,2017-11-14,69,30
|
||||
6D,400,2017-12-13,69,10
|
||||
5N,600,2018-01-23,69,20
|
||||
6E,300,2018-02-13,69,10
|
||||
6F,400,2018-03-19,69,10
|
||||
5O,600,2018-04-16,69,20
|
||||
6G,300,2018-05-12,69,10
|
||||
6H,400,2018-05-16,69,10
|
||||
6I,400,2018-06-15,69,10
|
||||
5P,600,2018-06-15,69,20
|
||||
5Q,600,2018-07-18,69,20
|
||||
6J,400,2018-08-17,69,10
|
||||
5R,600,2018-09-19,69,20
|
||||
6K,300,2018-10-17,69,10
|
||||
5S,600,2018-11-14,69,20
|
||||
6L,400,2018-11-14,69,10
|
||||
6M,300,2018-12-17,69,10
|
||||
5T,600,2019-01-16,69,20
|
||||
6N,400,2019-02-18,69,10
|
||||
L10,200,2019-04-12,69,5
|
||||
5U,600,2019-04-13,69,20
|
||||
L11,200,2019-04-24,69,5
|
||||
6O,400,2019-05-15,69,10
|
||||
5V,600,2019-06-14,69,20
|
||||
6P,400,2019-07-15,69,10
|
||||
L12,200,2019-07-15,69,5
|
||||
5W,600,2019-08-17,69,20
|
||||
L13,200,2019-10-14,69,5
|
||||
6Q,400,2019-10-15,69,10
|
||||
5X,600,2019-11-14,69,20
|
||||
6R,400,2020-01-18,69,10
|
||||
5Y,600,2020-02-17,69,20
|
||||
5Z,600,2020-05-05,69,20
|
||||
6S,400,2020-05-18,69,10
|
||||
6T,400,2020-07-15,69,10
|
||||
7A,600,2020-08-17,69,20
|
||||
6U,400,2020-10-14,69,10
|
||||
7B,600,2020-11-02,69,20
|
||||
6V,400,2021-01-13,69,10
|
||||
7C,600,2021-02-15,69,20
|
||||
6W,400,2021-04-15,69,10
|
||||
7D,600,2021-05-17,69,20
|
||||
6X,300,2021-07-14,69,10
|
||||
7E,600,2021-08-16,69,20
|
||||
6Y,400,2021-10-13,69,10
|
||||
7F,600,2021-11-22,69,20
|
||||
7G,600,2022-01-13,69,20
|
||||
6Z,400,2022-02-14,69,10
|
||||
8A,400,2022-04-12,69,10
|
||||
7H,600,2022-05-13,69,20
|
||||
7I,600,2022-07-12,69,20
|
||||
8B,400,2022-08-13,69,10
|
||||
7J,600,2022-11-11,69,20
|
||||
7K,600,2023-01-13,69,20
|
||||
7L,600,2023-03-11,69,20
|
||||
8C,400,2023-06-12,69,10
|
||||
7M,600,2023-08-12,69,20
|
||||
8D,400,2023-09-18,69,10
|
||||
7N,600,2023-12-12,69,20
|
||||
7O,600,2024-02-12,69,20
|
||||
8E,400,2024-03-13,69,10
|
||||
7P,600,2024-06-11,69,0
|
|
|
@ -0,0 +1,57 @@
|
|||
import sys
|
||||
import requests
|
||||
from dbfread import DBF
|
||||
from datetime import date
|
||||
import csv
|
||||
import datetime
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv(override=True)
|
||||
host=os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
|
||||
src_file_grp = os.getenv("FILE_ROOT")+os.getenv("GROUP_FILE")
|
||||
src_file_pa = os.getenv("FILE_ROOT")+os.getenv("PEN_FILE")
|
||||
def get_dynamic_path():
    """Return today's export path: C:\\Humbingo\\to_humbingo\\YYYYMMDD\\groups.csv."""
    stamp = datetime.datetime.now().strftime("%Y%m%d")
    return os.path.join("C:\\Humbingo\\to_humbingo", stamp, "groups.csv")
|
||||
print("Reading files: ", src_file_grp, src_file_pa)
|
||||
def get_auth_token():
    """POST the configured credentials to <host>/auth/token/.

    Returns the 'access' JWT on success, or None after printing the error.
    """
    url = host + "/auth/token/"
    print("Using URL: " + url)
    credentials = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=credentials)
        resp.raise_for_status()
    except requests.exceptions.RequestException as exc:
        print("Error obtaining auth token:", exc)
        return None
    return resp.json().get('access')
|
||||
|
||||
def send_data_to_api(csv_file_path, token):
    """Upload the groups CSV to <host>/api/v1/migrateGroups as multipart form-data.

    Prints the server message on HTTP 200; otherwise prints the status code
    and raw response body. All exceptions (missing file, network failure)
    are caught and printed.
    """
    try:
        url = host + "/api/v1/migrateGroups"
        print("Using URL: " + url)
        headers = {'Authorization': f'Bearer {token}'}
        # Context manager fixes the original's leaked file handle
        # (it called open() inline in `files=` and never closed it).
        with open(csv_file_path, 'rb') as fh:
            response = requests.post(url, files={'file': fh}, headers=headers)
        if response.status_code == 200:
            response_data = response.json()
            print(f"Message from server: {response_data.get('message')}")
            print("CSV file sent successfully to the API")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
if __name__ == "__main__":
    # Authenticate first; without a token there is nothing to upload.
    auth = get_auth_token()
    if not auth:
        print("Failed to obtain auth token, cannot send data to API.")
    else:
        send_data_to_api(get_dynamic_path(), auth)
|
|
@ -0,0 +1,71 @@
|
|||
import sys
|
||||
import requests
|
||||
from datetime import datetime
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
import csv
|
||||
|
||||
load_dotenv(override=True)
|
||||
host = os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
|
||||
src_file_subs = os.getenv("FILE_ROOT") + os.getenv("SUBSCRIPTION_FILE")
|
||||
|
||||
def get_dynamic_path():
    """Return today's export path: C:\\Humbingo\\to_humbingo\\YYYYMMDD\\subscription.csv."""
    day = datetime.now().strftime("%Y%m%d")
    return os.path.join("C:\\Humbingo\\to_humbingo", day, "subscription.csv")
|
||||
|
||||
print("Reading files: ", src_file_subs)
|
||||
|
||||
def get_auth_token():
    """Authenticate against <host>/auth/token/ and return the 'access' JWT, or None."""
    url = host + "/auth/token/"
    print("Using URL: ", url)
    credentials = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=credentials)
        resp.raise_for_status()
    except requests.exceptions.RequestException as exc:
        print("Error obtaining auth token:", exc)
        return None
    return resp.json().get('access')
|
||||
|
||||
def send_data_to_api(csv_file_path, token, batch_size=1000):
    """Upload the subscriptions CSV at *csv_file_path* to the API.

    BUG FIX: the original iterated ``range(0, len(csv_file_path), batch_size)``
    — i.e. over the *length of the path string*, not the data — computed an
    unused slice of path characters, and re-sent the entire file (leaking a
    file handle) once per slice. The server always receives the complete
    file, so it is sent exactly once here. ``batch_size`` is retained for
    interface compatibility; row batching would have to happen before the
    CSV is written.
    """
    url = host + "/api/v1/migrateSubscriptions"
    print("Using URL: ", url)
    headers = {'Authorization': f'Bearer {token}'}
    try:
        # Send the CSV as multipart form-data; the context manager
        # guarantees the handle is closed.
        with open(csv_file_path, 'rb') as fh:
            response = requests.post(url, files={'file': fh}, headers=headers)
        if response.status_code == 201:
            print(f"Message from server: {response.json().get('message')}")
            print("CSV file sent successfully to the API")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
if __name__ == "__main__":
    # Resolve today's subscription CSV, then authenticate before uploading.
    csv_file_path = get_dynamic_path()
    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token, cannot send data to API.")
    else:
        send_data_to_api(csv_file_path, token)
|
|
@ -0,0 +1,65 @@
|
|||
import os
|
||||
import requests
|
||||
import datetime
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
# Environment variables
|
||||
host = os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
|
||||
# Construct dynamic CSV file path
|
||||
def get_dynamic_path():
    """Return today's export path: C:\\Humbingo\\to_humbingo\\YYYYMMDD\\trxn.csv."""
    day = datetime.datetime.now().strftime("%Y%m%d")
    return os.path.join("C:\\Humbingo\\to_humbingo", day, "trxn.csv")
|
||||
|
||||
# Authenticate and get token
|
||||
def get_auth_token():
    """POST the configured credentials and return the 'access' token (None on failure)."""
    url = host + "/auth/token/"
    body = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=body)
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        print("Error obtaining auth token:", e)
        return None
    return resp.json().get('access')
|
||||
|
||||
# Send CSV data to the API
|
||||
def send_data_to_api(csv_file_path, token, _retry=True):
    """Upload the transactions CSV; on HTTP 401, re-authenticate once and retry.

    BUG FIX: the original recursed without bound when the server kept
    answering 401 with every freshly obtained token. The added ``_retry``
    flag (default True, so existing callers are unaffected) limits
    re-authentication to a single attempt.
    """
    url = host + "/api/v1/migrateTransactions"
    headers = {'Authorization': f'Bearer {token}'}
    try:
        with open(csv_file_path, 'rb') as csvfile:
            response = requests.post(url, files={'file': csvfile}, headers=headers)
        if response.status_code == 201:
            print(f"Message from server: {response.json().get('message')}")
        elif response.status_code == 401 and _retry:
            print("Token expired. Re-authenticating...")
            new_token = get_auth_token()
            if new_token:
                send_data_to_api(csv_file_path, new_token, _retry=False)
            else:
                print("Failed to re-authenticate.")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
# Main execution
|
||||
if __name__ == '__main__':
    # Bail out early when authentication fails.
    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token, cannot send data to API.")
        exit()

    csv_file_path = get_dynamic_path()
    if not os.path.exists(csv_file_path):
        print(f"CSV file {csv_file_path} not found.")
    else:
        send_data_to_api(csv_file_path, token)
|
|
@ -0,0 +1,56 @@
|
|||
import requests
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
from datetime import datetime
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
# Environment Variables
|
||||
host = os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
|
||||
def get_dynamic_path():
    """Return today's export path: C:\\Humbingo\\to_humbingo\\YYYYMMDD\\users.csv."""
    stamp = datetime.now().strftime("%Y%m%d")
    return os.path.join("C:\\Humbingo\\to_humbingo", stamp, "users.csv")
|
||||
|
||||
def get_auth_token():
    """Obtain a JWT access token from the API, or None on request failure."""
    url = f"{host}/auth/token/"
    payload = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=payload)
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"Error obtaining auth token: {e}")
        return None
    return resp.json().get('access')
|
||||
|
||||
def send_data_to_api(csv_file_path, token):
    """Upload the users CSV to <host>/api/v1/migrateUsers as multipart form-data.

    Prints the server's message on HTTP 200, otherwise prints the status
    code and response body. Any exception (missing file, network error) is
    caught and printed.
    """
    url = f"{host}/api/v1/migrateUsers"
    try:
        # Context manager fixes the original's leaked file handle.
        with open(csv_file_path, 'rb') as fh:
            headers = {'Authorization': f'Bearer {token}'}
            response = requests.post(url, files={'file': fh}, headers=headers)
        if response.status_code == 200:
            response_data = response.json()
            print(f"Message from server: {response_data.get('message')}")
            print("CSV file sent successfully to the API")
        else:
            print(f"Failed to send CSV file to the API. Status code: {response.status_code}")
            print(f"Response content: {response.content.decode('utf-8')}")
    except Exception as e:
        print(f"Error: {e}")
|
||||
|
||||
if __name__ == "__main__":
    csv_path = get_dynamic_path()

    print("Sending data to the API...")
    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token, cannot send data to API.")
    else:
        send_data_to_api(csv_path, token)
|
|
@ -0,0 +1,30 @@
|
|||
@echo off

REM Enter the project's virtual environment before running any script.
call .\env\Scripts\activate

REM Migration order matters: groups -> users -> subscriptions -> transactions.
echo Migrating Groups
python migrate_groups.py
echo Group migration completed.

echo Migrating Users
python migrate_users.py
echo User migration completed.

echo Migrating Subscriptions
python migrate_subscriptions.py
echo Subscription migration completed.

echo Migrating Transactions
python migrate_transactions.py
echo Transaction migration completed.

REM Leave the virtual environment again.
call .\env\Scripts\deactivate.bat

echo All migrations have been run.
pause
|
|
@ -0,0 +1,19 @@
|
|||
#!/bin/bash
# Run every migration script in order.
#
# BUG FIX: the original used `python3 -c 'import migrate_X'`, which only
# imports each module; the scripts guard their work behind
# `if __name__ == "__main__":`, so importing never migrated anything.
# Executing the files directly runs that guarded code.

echo "Migrating Groups"
python3 migrate_groups.py
echo "Group migration completed."

echo "Migrating Users"
python3 migrate_users.py
echo "User migration completed."

echo "Migrating Subscriptions"
python3 migrate_subscriptions.py
echo "Subscription migration completed."

echo "Migrating Transactions"
python3 migrate_transactions.py
echo "Transaction migration completed."

echo "All migrations have been run."
|
|
@ -0,0 +1,133 @@
|
|||
import sys
|
||||
import requests
|
||||
from dbfread import DBF
|
||||
from datetime import date, datetime
|
||||
import csv
|
||||
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv(override=True)
|
||||
host=os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
|
||||
src_file_grp = os.getenv("FILE_ROOT")+os.getenv("GROUP_FILE")
|
||||
src_file_pa = os.getenv("FILE_ROOT")+os.getenv("PEN_FILE")
|
||||
print("Reading files: ", src_file_grp, src_file_pa)
|
||||
def convert_dates_to_strings(data):
    """Recursively replace date/datetime values in *data* with ISO-8601 strings.

    Lists and dicts are rebuilt (the input is not mutated); any other value
    is returned unchanged.
    """
    if isinstance(data, (date, datetime)):
        return data.isoformat()
    if isinstance(data, dict):
        return {k: convert_dates_to_strings(v) for k, v in data.items()}
    if isinstance(data, list):
        return [convert_dates_to_strings(item) for item in data]
    return data
|
||||
|
||||
def read_dbf_file(file_path, columns, encoding='latin1'):
    """Read *columns* from a DBF file, keeping only the first record per ACTP value.

    Returns a list of dicts; on any read error the error is printed and the
    records collected so far are returned.
    """
    records = []
    seen_actp = set()
    try:
        with DBF(file_path, encoding=encoding) as dbf:
            for row in dbf:
                entry = {column: row.get(column) for column in columns}
                actp = entry.get('ACTP')
                if actp in seen_actp:
                    continue  # duplicate account type — keep only the first
                seen_actp.add(actp)
                records.append(entry)
    except Exception as e:
        print("Error reading DBF file:", e)
    return records
|
||||
|
||||
def read_pa_dbf_file(file_path, columns, filter_column, filter_value, encoding='latin1'):
    """Read *columns* from a DBF file, keeping only rows where *filter_column* == *filter_value*.

    Returns a list of dicts; on any read error the error is printed and the
    rows collected so far are returned.
    """
    data = []
    try:
        with DBF(file_path, encoding=encoding) as dbf:
            for rec in dbf:
                if rec.get(filter_column) != filter_value:
                    continue
                data.append({column: rec.get(column) for column in columns})
    except Exception as e:
        print("Error reading PA DBF file:", e)
    return data
|
||||
|
||||
def save_data_to_csv(data, file_path, columns):
    """Write *data* (list of dicts) to *file_path* as UTF-8 CSV with header *columns*."""
    try:
        with open(file_path, 'w', newline='', encoding='utf-8') as fh:
            out = csv.DictWriter(fh, fieldnames=columns)
            out.writeheader()
            out.writerows(data)
        print(f"CSV saved to {file_path}")
    except Exception as e:
        print("Error saving CSV to file:", e)
|
||||
|
||||
def get_auth_token():
    """Authenticate against <host>/auth/token/ and return the 'access' JWT, or None."""
    url = host + "/auth/token/"
    print("Using URL: " + url)
    body = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=body)
        resp.raise_for_status()
    except requests.exceptions.RequestException as exc:
        print("Error obtaining auth token:", exc)
        return None
    return resp.json().get('access')
|
||||
|
||||
def send_data_to_api(csv_file_path, token):
    """Upload the groups CSV to <host>/api/v1/migrateGroups as multipart form-data.

    Prints the server message on HTTP 200; otherwise prints the status code
    and raw response body. All exceptions are caught and printed.
    """
    try:
        url = host + "/api/v1/migrateGroups"
        print("Using URL: " + url)
        headers = {'Authorization': f'Bearer {token}'}
        # Context manager fixes the original's leaked file handle.
        with open(csv_file_path, 'rb') as fh:
            response = requests.post(url, files={'file': fh}, headers=headers)
        if response.status_code == 200:
            response_data = response.json()
            print(f"Message from server: {response_data.get('message')}")
            print("CSV file sent successfully to the API")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
if __name__ == "__main__":
    dbf_file_path = src_file_grp
    pa_dbf_file_path = src_file_pa

    # Pull account-type rows (one per unique ACTP) from the main DBF.
    columns_to_extract = ["ACTP", "MINBAL", "RDATE", "NOFI"]
    dbf_data = read_dbf_file(dbf_file_path, columns_to_extract)

    # Pull penalty rows (PA_CD == "InstPenalAmt") from the PA DBF.
    pa_columns_to_extract = ["PA_TP", "PA_CD", "PA_NA"]
    pa_data = read_pa_dbf_file(pa_dbf_file_path, pa_columns_to_extract, "PA_CD", "InstPenalAmt")

    # Map each account type (PA_TP) to its penalty amount (PA_NA) and
    # attach it to the matching group record ('' when no penalty exists).
    pa_dict = {row['PA_TP']: row['PA_NA'] for row in pa_data}
    for record in dbf_data:
        record['PA_NA'] = pa_dict.get(record.get('ACTP'), '')

    # Persist the merged data, then push it to the API.
    columns_to_extract.append("PA_NA")
    save_data_to_csv(dbf_data, 'groups.csv', columns_to_extract)

    print("Sending groups data to the API...")
    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token, cannot send data to API.")
    else:
        send_data_to_api('groups.csv', token)
|
|
@ -0,0 +1,135 @@
|
|||
import sys
|
||||
import requests
|
||||
from dbfread import DBF
|
||||
from datetime import date, datetime
|
||||
import csv
|
||||
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv(override=True)
|
||||
host=os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
src_file_subs = os.getenv("FILE_ROOT")+os.getenv("SUBSCRIPTION_FILE")
|
||||
print("Reading files: ", src_file_subs)
|
||||
|
||||
def read_dbf_file(file_path, columns):
    """Extract and clean subscription rows from a DBF file.

    Per record: keep only *columns*; strip non-digits from ACNO and PHONE;
    fall back to PHONE when NAME2 is empty (dropping the record when both
    are empty); drop records whose PHONE is not exactly 10 digits; collapse
    ADD1/ADD2/ADD3/PLACE/PIN into a single '-'-joined 'Address' field.
    On any error the error is printed and the rows collected so far are
    returned.
    """
    address_cols = ['ADD1', 'ADD2', 'ADD3', 'PLACE', 'PIN']
    data = []
    try:
        with DBF(file_path) as dbf:
            for record in dbf:
                row = {column: record.get(column) for column in columns}

                # Normalise account number and phone to digits only.
                if row.get('ACNO'):
                    row['ACNO'] = ''.join(ch for ch in row['ACNO'] if ch.isdigit())
                if row.get('PHONE'):
                    row['PHONE'] = ''.join(ch for ch in row['PHONE'] if ch.isdigit())

                # NAME2 falls back to PHONE; with neither, skip the record.
                if not row.get('NAME2'):
                    if row.get('PHONE'):
                        row['NAME2'] = row['PHONE']
                    else:
                        continue

                # Only 10-digit phone numbers are migratable.
                if len(row.get('PHONE', '')) != 10:
                    continue

                # Merge the address parts into one field, then drop the parts.
                parts = [record.get(col, '') for col in address_cols]
                row['Address'] = '-'.join(p for p in parts if p)
                for col in address_cols:
                    del row[col]

                data.append(row)
    except Exception as e:
        print("Error:", e)
    return data
|
||||
|
||||
def save_data_to_csv(data, file_path, columns):
    """Write *data* to CSV, first dropping the address/name fields.

    ADD1/ADD2/ADD3/PLACE/PIN/Address/NAME2 are removed both from the header
    and from every record (the records are mutated in place).
    """
    dropped = ("ADD1", "ADD2", "ADD3", "PLACE", "PIN", 'Address', 'NAME2')
    try:
        fieldnames = [col for col in columns if col not in dropped]
        for record in data:
            for field in dropped:
                record.pop(field, None)

        with open(file_path, 'w', newline='') as csv_file:
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(data)
        print(f"CSV saved to {file_path}")
    except Exception as e:
        print("Error saving CSV to file:", e)
|
||||
|
||||
def get_auth_token():
    """POST credentials to <host>/auth/token/ and return the 'access' JWT, or None on failure."""
    url = host + "/auth/token/"
    print("Using URL: ", url)
    payload = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=payload)
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        print("Error obtaining auth token:", e)
        return None
    return resp.json().get('access')
|
||||
|
||||
def send_data_to_api(data, token, batch_size=1000):
    """Send *data* (list of row dicts) to the subscriptions endpoint in batches.

    Each batch is written to a temporary 'batch.csv' (via save_data_to_csv,
    which also strips the address/name columns) and uploaded as multipart
    form-data. Errors are printed per batch; remaining batches still run.
    """
    url = host + "/api/v1/migrateSubscriptions"
    print("Using URL: ", url)
    csv_file_path = 'batch.csv'
    headers = {'Authorization': f'Bearer {token}'}
    for i in range(0, len(data), batch_size):
        batch = data[i:i + batch_size]
        save_data_to_csv(batch, csv_file_path, batch[0].keys())
        try:
            # Context manager fixes the original's leak of one open file
            # descriptor per batch.
            with open(csv_file_path, 'rb') as fh:
                response = requests.post(url, files={'file': fh}, headers=headers)

            if response.status_code == 201:
                print(f"Message from server: {response.json().get('message')}")
                print(f"Batch {i // batch_size + 1} sent successfully to the API")
            else:
                print(f"Failed to send batch {i // batch_size + 1} to the API. Status code:", response.status_code)
                print("Response content:", response.content.decode('utf-8'))
        except Exception as e:
            print("Error:", e)
|
||||
|
||||
if __name__ == "__main__":
    dbf_file_path = src_file_subs

    # Columns pulled from the subscriptions DBF; the ADD*/PLACE/PIN parts
    # are merged into 'Address' by read_dbf_file.
    columns_to_extract = ["ACTP", "ACNO", "PHONE", "ADD1", "ADD2", "ADD3", "PLACE", "PIN"]
    dbf_data = read_dbf_file(dbf_file_path, columns_to_extract)

    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token, cannot send data to API.")
    else:
        # Upload in authenticated batches.
        send_data_to_api(dbf_data, token)
|
|
@ -0,0 +1,162 @@
|
|||
import sys
|
||||
import os
|
||||
import glob
|
||||
import requests
|
||||
from dbfread import DBF
|
||||
import csv
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv(override=True)
|
||||
host=os.getenv("HOST")
|
||||
login_user = os.getenv("USER")
|
||||
login_pass = os.getenv("PASS")
|
||||
|
||||
src_file_trxn = os.getenv("FILE_ROOT")+os.getenv("TXN_DIR")
|
||||
print("Reading files: ", src_file_trxn)
|
||||
|
||||
def get_auth_token():
    """Authenticate with <host>/auth/token/; return the 'access' JWT or None."""
    url = host + "/auth/token/"
    print("Using URL: ", url)
    body = {"phone_no": login_user, "password": login_pass}
    try:
        resp = requests.post(url, data=body)
        resp.raise_for_status()
    except requests.exceptions.RequestException as exc:
        print("Error obtaining auth token:", exc)
        return None
    return resp.json().get('access')
|
||||
|
||||
def read_dbf_file(file_path, columns):
    """Read *columns* from a DBF transaction file as stripped strings.

    Memo files may be absent (ignore_missing_memofile); latin1 decoding
    handles non-ASCII bytes. Returns whatever rows were read before any
    error (which is printed).
    """
    rows = []
    try:
        with DBF(file_path, ignore_missing_memofile=True, encoding='latin1') as dbf:
            for record in dbf:
                rows.append({column: str(record.get(column)).strip() for column in columns})
    except Exception as e:
        print("Error:", e)
    return rows
|
||||
|
||||
def process_data(data, date_str):
    """Turn raw DBF transaction rows into API-ready records.

    *date_str* is the YYMMDD portion of the source file name; it is combined
    with each record's TM field to build a 'transaction_at' datetime.
    GL rows are skipped entirely. A PEN row carries a "TOTP/VOAC" identifier
    as the last space-separated token of VONAR; when that matches an
    already-seen transaction, that transaction's 'PEN' amount is set from
    the PEN row's VOAMT. Returns [] if *date_str* cannot be parsed.
    """
    processed_data = []
    # Maps (VOTP, VOAC) -> record so a later PEN row can find its transaction.
    transaction_map = {}

    # Parse the date from the file name
    try:
        file_date = datetime.strptime(date_str, '%y%m%d')
    except ValueError:
        print(f"Error parsing date from {date_str}. Skipping file.")
        return []

    for record in data:
        # General-ledger rows are not migrated.
        if record.get('VOTP') == 'GL':
            continue

        if record.get('VOTP') == 'PEN':
            # The transaction identifier is the last space-separated token
            # of VONAR, expected in the form "TOTP/VOAC".
            vonar_parts = record.get('VONAR').split(' ')
            identifier = vonar_parts[-1] if len(vonar_parts) > 0 else ''
            parts = identifier.split('/')
            if len(parts) == 2:
                totp, voac = parts
                key = (totp, voac)
                if key in transaction_map:
                    transaction_map[key]['PEN'] = record.get('VOAMT')
            # NOTE(review): when the identifier does not split into exactly
            # two parts, `key` keeps its value from a previous iteration
            # (and is unbound if this is the first record); the PEN row is
            # then still timestamped, stored under that stale key and
            # appended below — confirm this fall-through is intended.
        else:
            key = (record.get('VOTP'), record.get('VOAC'))
            # Default penalty; may be overwritten by a later matching PEN row.
            record['PEN'] = '0'

        # Combine the file date with the TM field ("HH:MM" or "HHMM") for
        # the transaction_at column; malformed or absent times fall back
        # to midnight.
        transaction_time = record.get('TM', '0000')
        if transaction_time and transaction_time != '0000':
            if ':' in transaction_time:
                try:
                    transaction_hour, transaction_minute = map(int, transaction_time.split(':'))
                except ValueError:
                    transaction_hour, transaction_minute = 0, 0
            else:
                try:
                    transaction_hour = int(transaction_time[:2])
                    transaction_minute = int(transaction_time[2:])
                except ValueError:
                    transaction_hour, transaction_minute = 0, 0
        else:
            transaction_hour, transaction_minute = 0, 0

        transaction_datetime = file_date + timedelta(hours=transaction_hour, minutes=transaction_minute)
        record['transaction_at'] = transaction_datetime

        transaction_map[key] = record
        processed_data.append(record)

    return processed_data
|
||||
|
||||
def save_data_to_csv(data, file_path, columns):
    """Write processed transaction rows to *file_path* as UTF-8 CSV.

    Note: *columns* is mutated in place — the derived 'PEN' and
    'transaction_at' fields are appended if absent, so repeated calls with
    the same list do not duplicate them.
    """
    try:
        for extra_field in ('PEN', 'transaction_at'):
            if extra_field not in columns:
                columns.append(extra_field)

        with open(file_path, 'w', newline='', encoding='utf-8') as out_file:
            csv_writer = csv.DictWriter(out_file, fieldnames=columns)
            csv_writer.writeheader()
            csv_writer.writerows(data)
        print(f"CSV saved to {file_path}")
    except Exception as err:
        print("Error saving CSV to file:", err)
|
||||
|
||||
def send_data_to_api(csv_file_path, token):
    """Upload a transactions CSV to the migrateTransactions endpoint.

    On HTTP 401 the token is refreshed once via get_auth_token() and the
    upload is retried recursively; any other non-201 status is logged.
    All errors are caught and printed (best-effort upload).
    """
    url = host + "/api/v1/migrateTransactions"
    print("Using URL: " + url)
    headers = {'Authorization': f'Bearer {token}'}
    try:
        # 'with' guarantees the upload file is closed even on request errors
        # (the original leaked the handle returned by a bare open()).
        with open(csv_file_path, 'rb') as csv_file:
            files = {'file': csv_file}
            response = requests.post(url, files=files, headers=headers)

        if response.status_code == 201:
            response_data = response.json()
            print(f"Message from server: {response_data.get('message')}")
            print("CSV file sent successfully to the API")
        elif response.status_code == 401:
            print("Token expired. Re-authenticating...")
            new_token = get_auth_token()
            if new_token:
                send_data_to_api(csv_file_path, new_token)
            else:
                print("Failed to re-authenticate. Cannot send data to API.")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
def process_files_in_folder(folder_path):
    """Convert every DT*.DBF daily transaction file under *folder_path* to CSV and push each to the API."""
    dbf_files = glob.glob(os.path.join(folder_path, 'DT*.DBF'))
    if not dbf_files:
        print("No matching files found.")
        return

    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token, cannot send data to API.")
        return

    columns_to_extract = ["VOTP", "VOAC", "VOAMT", "VONAR", "TM"]

    for dbf_path in dbf_files:
        # File names look like DTyymmdd.DBF; characters 2..7 carry the date.
        date_token = os.path.basename(dbf_path)[2:8]
        records = read_dbf_file(dbf_path, columns_to_extract)
        prepared = process_data(records, date_token)
        if not prepared:
            continue
        csv_path = dbf_path.replace('.DBF', '.csv')
        save_data_to_csv(prepared, csv_path, columns_to_extract)
        send_data_to_api(csv_path, token)
|
||||
|
||||
if __name__ == "__main__":
    # Command-line argument handling was disabled in favour of the
    # environment-driven path (FILE_ROOT + TXN_DIR) loaded at module level.
    # if len(sys.argv) < 2:
    #     print("Usage: python script.py <folder_path>")
    # else:
    folder_path = src_file_trxn
    process_files_in_folder(folder_path)
|
|
@ -0,0 +1,157 @@
|
|||
import sys
import requests
from dbfread import DBF
from datetime import date, datetime
import csv

import os
from dotenv import load_dotenv

# Pull HOST / credentials / file locations from .env; override=True lets the
# .env values win over variables already exported in the shell.
load_dotenv(override=True)
host=os.getenv("HOST")
# NOTE(review): "USER" collides with the standard Unix login-name variable;
# without override=True the shell value would shadow the .env one — verify.
login_user = os.getenv("USER")
login_pass = os.getenv("PASS")
# Source DBF containing the user master data, e.g. ./26062024/DE2.DBF.
src_file_usr = os.getenv("FILE_ROOT")+os.getenv("USER_FILE")
print("Reading files: ", src_file_usr)
|
||||
|
||||
def convert_dates_to_strings(data):
    """Recursively replace date/datetime values in *data* with ISO-8601 strings.

    Lists and dicts are rebuilt (the input is not mutated); any other value
    is returned unchanged.
    """
    if isinstance(data, (date, datetime)):
        return data.isoformat()
    if isinstance(data, dict):
        return {name: convert_dates_to_strings(value) for name, value in data.items()}
    if isinstance(data, list):
        return [convert_dates_to_strings(element) for element in data]
    return data
|
||||
|
||||
def read_dbf_file(file_path, columns):
    """Read user records from a .dbf file and normalise them for upload.

    Cleaning/filtering applied per record:
      * ACNO and PHONE are stripped down to digits only.
      * A record with no NAME2 falls back to PHONE as the name; if both are
        empty the record is dropped.
      * Records whose PHONE is not exactly 10 digits, or whose PHONE was
        already seen, are dropped.
      * ADD1/ADD2/ADD3/PLACE/PIN are merged into a single 'ADDR' field
        (joined with '-') and removed from the record.

    Returns a list of cleaned dicts; on any read error the rows collected so
    far are returned and the error is printed.
    """
    data = []
    phone_numbers_seen = set()
    # Address fields are expected in *columns* (the caller passes them).
    address_fields = ['ADD1', 'ADD2', 'ADD3', 'PLACE', 'PIN']
    try:
        with DBF(file_path) as dbf:
            for record in dbf:
                # Keep only the requested columns.
                filtered_record = {column: record.get(column) for column in columns}

                # Clean ACNO/PHONE down to numeric characters.
                if filtered_record.get('ACNO'):
                    filtered_record['ACNO'] = ''.join(filter(str.isdigit, filtered_record['ACNO']))
                if filtered_record.get('PHONE'):
                    filtered_record['PHONE'] = ''.join(filter(str.isdigit, filtered_record['PHONE']))

                # Normalise a missing phone to '' so the length check below
                # cannot raise TypeError on None (a single None PHONE used to
                # abort the whole file via the broad except).
                phone = filtered_record.get('PHONE') or ''

                # NAME2 fallback: use the phone number, or drop the record.
                if not filtered_record.get('NAME2'):
                    if phone:
                        filtered_record['NAME2'] = phone
                    else:
                        continue

                # Require a valid, unseen 10-digit phone number.
                if len(phone) != 10:
                    continue
                if phone in phone_numbers_seen:
                    continue
                phone_numbers_seen.add(phone)

                # Merge the address parts into one '-'-joined ADDR column,
                # coercing to str in case PIN is stored numerically.
                address_components = [record.get(col, '') for col in address_fields]
                filtered_record['ADDR'] = '-'.join(str(part) for part in address_components if part)

                for col in address_fields:
                    del filtered_record[col]

                data.append(filtered_record)
    except Exception as e:
        print("Error:", e)
    return data
|
||||
|
||||
def save_data_to_csv(data, file_path, columns):
    """Write cleaned user records to *file_path* as CSV.

    Mutates *columns* in place: 'ADDR' is appended and the raw address
    fields (ADD1..PIN) are removed so the header matches the record keys.
    """
    try:
        if "ADDR" not in columns:
            columns.append("ADDR")
        for raw_field in ["ADD1", "ADD2", "ADD3", "PLACE", "PIN"]:
            if raw_field in columns:
                columns.remove(raw_field)

        with open(file_path, 'w', newline='') as out_file:
            csv_writer = csv.DictWriter(out_file, fieldnames=columns)
            csv_writer.writeheader()
            csv_writer.writerows(data)
        print(f"CSV saved to {file_path}")
    except Exception as err:
        print("Error saving CSV to file:", err)
|
||||
|
||||
def get_auth_token():
    """POST the configured credentials to /auth/token/ and return the access token, or None on any request failure."""
    endpoint = host + "/auth/token/"
    print("Using URL: ", endpoint)
    credentials = {"phone_no": login_user, "password": login_pass}
    try:
        reply = requests.post(endpoint, data=credentials)
        reply.raise_for_status()
        return reply.json().get('access')
    except requests.exceptions.RequestException as err:
        print("Error obtaining auth token:", err)
        return None
|
||||
|
||||
def send_data_to_api(csv_file_path, token):
    """Upload the users CSV to the migrateUsers endpoint with a bearer token.

    Logs the server message on HTTP 200, the status/body on any other
    status, and any raised exception (best-effort upload).
    """
    try:
        # API endpoint
        url = host + "/api/v1/migrateUsers"
        print("Using URL: ", url)
        headers = {'Authorization': f'Bearer {token}'}
        # 'with' ensures the upload file is closed even on request errors
        # (the original leaked the handle returned by a bare open()).
        with open(csv_file_path, 'rb') as csv_file:
            files = {'file': csv_file}
            response = requests.post(url, files=files, headers=headers)

        # Check response status
        if response.status_code == 200:
            response_data = response.json()
            print(f"Message from server: {response_data.get('message')}")
            print("CSV file sent successfully to the API")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
if __name__ == "__main__":
    # Command-line argument handling was disabled; the DBF path now comes
    # from the environment (FILE_ROOT + USER_FILE) loaded at module level.
    # if len(sys.argv) < 2:
    #     print("Usage: python script.py DE2.DBF")
    # else:
    dbf_file_path = src_file_usr
    # Columns to pull from the DBF; the address fields get merged into ADDR.
    columns_to_extract = ["ACTP", "ACNO", "NAME2", "ADD1", "ADD2", "ADD3", "PLACE", "PIN", "PHONE"]
    # Read and clean the user records.
    dbf_data = read_dbf_file(dbf_file_path, columns_to_extract)
    # Stage the cleaned records as a CSV for upload.
    save_data_to_csv(dbf_data, 'users.csv', columns_to_extract)
    print("Sending data to the API...")

    # Authenticate, then upload the staged CSV.
    token = get_auth_token()
    if token:
        send_data_to_api('users.csv', token)
    else:
        print("Failed to obtain auth token, cannot send data to API.")
|
|
@ -0,0 +1,83 @@
|
|||
import sys
|
||||
import requests
|
||||
from dbfread import DBF
|
||||
from datetime import date, datetime
|
||||
import csv
|
||||
|
||||
def convert_dates_to_strings(data):
    """Recursively convert date and datetime objects inside *data* to ISO-8601 strings, returning rebuilt containers."""
    if isinstance(data, list):
        return [convert_dates_to_strings(element) for element in data]
    if isinstance(data, dict):
        return {name: convert_dates_to_strings(value) for name, value in data.items()}
    if isinstance(data, (date, datetime)):
        return data.isoformat()
    return data
|
||||
|
||||
def read_dbf_file(file_path, columns):
    """Read *file_path* as a DBF table, keeping only *columns* per record.

    Returns a list of dicts; on any read error the rows collected so far
    are returned and the error is printed.
    """
    rows = []
    try:
        with DBF(file_path) as table:
            for record in table:
                rows.append({name: record.get(name) for name in columns})
    except Exception as err:
        print("Error:", err)
    return rows
|
||||
|
||||
def save_data_to_csv(data, file_path, columns):
    """Write *data* (a list of dicts) to *file_path* as CSV with *columns* as the header row."""
    try:
        with open(file_path, 'w', newline='') as handle:
            dict_writer = csv.DictWriter(handle, fieldnames=columns)
            dict_writer.writeheader()
            dict_writer.writerows(data)
        print(f"CSV saved to {file_path}")
    except Exception as err:
        print("Error saving CSV to file:", err)
|
||||
|
||||
def send_data_to_api(csv_file_path):
    """Upload users.csv to the (legacy) migrateUsers endpoint.

    Legacy variant: the host is hard-coded and no auth header is sent.
    Logs the server message on HTTP 200 and the status/body otherwise.
    """
    try:
        # API endpoint
        url = "https://127.0.0.1/api/v1/migrateUsers"

        # 'with' ensures the upload file is closed even on request errors
        # (the original leaked the handle returned by a bare open()).
        with open(csv_file_path, 'rb') as csv_file:
            files = {'file': csv_file}
            response = requests.post(url, files=files)

        # Check response status
        if response.status_code == 200:
            response_data = response.json()
            print(f"Message from server: {response_data.get('message')}")
            print("CSV file sent successfully to the API")
        else:
            print("Failed to send CSV file to the API. Status code:", response.status_code)
            print("Response content:", response.content.decode('utf-8'))
    except Exception as e:
        print("Error:", e)
|
||||
|
||||
if __name__ == "__main__":
    # Legacy variant: the DBF path is still taken from the command line.
    if len(sys.argv) < 2:
        print("Usage: python script.py <dbf_file_path>")
    else:
        # Get the file path from the command-line arguments
        dbf_file_path = sys.argv[1]
        # Columns to pull from the DBF file.
        columns_to_extract = ["ACTP", "ACNO", "NAME2", "ADD1", "ADD2", "ADD3", "PLACE", "PIN", "PHONE"]
        # Read the records, stage them as users.csv, and upload.
        dbf_data = read_dbf_file(dbf_file_path, columns_to_extract)
        save_data_to_csv(dbf_data, 'users.csv', columns_to_extract)
        print("Sending data to the API...")
        send_data_to_api('users.csv')
|
|
@ -0,0 +1,24 @@
|
|||
import subprocess
|
||||
import sys
|
||||
|
||||
def run_script(script_name):
    """Run *script_name* with the current interpreter and echo its output.

    stdout is always printed; stderr only when non-empty. Failures to
    launch are caught and reported rather than raised.
    """
    try:
        completed = subprocess.run(
            [sys.executable, script_name],
            capture_output=True,
            text=True,
        )
        print(f"Running {script_name}...\n")
        print(completed.stdout)
        if completed.stderr:
            print(f"Error in {script_name}:\n{completed.stderr}")
    except Exception as exc:
        print(f"Failed to run {script_name}: {exc}")
|
||||
|
||||
if __name__ == "__main__":
    # Migration order matters: groups -> users -> subscriptions -> transactions.
    scripts = [
        'm_group.py',
        'm_user.py',
        'm_subscription.py',
        'm_transactions.py'
    ]

    for script in scripts:
        run_script(script)
|
|
@ -0,0 +1,47 @@
|
|||
import os
import requests
from dotenv import load_dotenv

# Load environment variables
load_dotenv()
HOST = os.getenv("HOST")
# NOTE(review): "USER" collides with the standard Unix login-name variable;
# the shell value may win over the .env one here (no override=True) — verify.
USER = os.getenv("USER")
PASS = os.getenv("PASS")
# NOTE(review): hard-coded Windows path — consider moving into the .env file.
CSV_FILE_PATH = r'C:\Humbingo\to_humbingo\transactions.csv'
|
||||
|
||||
def get_auth_token():
    """Authenticate against /auth/token/ and return the JWT access token, or None on any request failure."""
    endpoint = f"{HOST}/auth/token/"
    credentials = {"phone_no": USER, "password": PASS}
    try:
        reply = requests.post(endpoint, data=credentials)
        reply.raise_for_status()
        return reply.json().get('access')
    except requests.exceptions.RequestException as err:
        print(f"Error obtaining auth token: {err}")
        return None
|
||||
|
||||
def send_csv_to_server(csv_file_path, token):
    """Upload the CSV at *csv_file_path* to the uploadTransaction endpoint.

    Authenticates with a bearer token; logs the server response on HTTP 201
    and the status/body on any other outcome. Errors are caught and printed.
    """
    endpoint = f"{HOST}/api/v1/uploadTransaction/"
    auth_headers = {'Authorization': f'Bearer {token}'}
    try:
        with open(csv_file_path, 'rb') as handle:
            reply = requests.post(endpoint, files={'file': handle}, headers=auth_headers)
            if reply.status_code == 201:
                print("CSV file sent successfully to the API")
                print("Server response:", reply.json())
            else:
                print(f"Failed to send CSV file to the API. Status code: {reply.status_code}")
                print("Response content:", reply.content.decode('utf-8'))
    except Exception as err:
        print(f"Error sending CSV file to server: {err}")
|
||||
|
||||
def main():
    """Authenticate, then push the configured transactions CSV to the server."""
    token = get_auth_token()
    if not token:
        print("Failed to obtain auth token.")
        return
    send_csv_to_server(CSV_FILE_PATH, token)
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: run the upload only when executed as a script.
    main()
|
|
@ -0,0 +1,275 @@
|
|||
VOTP,VOAC,VOAMT,VONAR,TM,PEN,transaction_at
|
||||
7F,391,600.0,BY CASH 1,07:35,20.0,2023-12-16 07:35:00
|
||||
5U,162,600.0,BY CASH 2,07:35,20.0,2023-12-16 07:35:00
|
||||
5T,162,600.0,BY CASH 3,07:36,20.0,2023-12-16 07:36:00
|
||||
6T,343,400.0,BY CASH 4,07:36,10.0,2023-12-16 07:36:00
|
||||
7H,567,600.0,BY CASH 5,08:06,20.0,2023-12-16 08:06:00
|
||||
7H,567,600.0,BY CASH 6,08:08,0,2023-12-16 08:08:00
|
||||
7I,501,600.0,BY CASH 7,08:28,20.0,2023-12-16 08:28:00
|
||||
7J,78,600.0,BY CASH 8,08:41,20.0,2023-12-16 08:41:00
|
||||
7J,551,600.0,BY CASH 9,08:41,20.0,2023-12-16 08:41:00
|
||||
7J,679,600.0,BY CASH 10,08:41,20.0,2023-12-16 08:41:00
|
||||
5X,482,1200.0,BY CASH 11,08:51,20.0,2023-12-16 08:51:00
|
||||
5W,455,600.0,BY CASH 12,08:51,20.0,2023-12-16 08:51:00
|
||||
8C,366,400.0,BY CASH 13,09:25,10.0,2023-12-16 09:25:00
|
||||
6Q,198,400.0,BY CASH 14,09:26,10.0,2023-12-16 09:26:00
|
||||
7B,102,600.0,BY CASH 15,09:37,20.0,2023-12-16 09:37:00
|
||||
7B,202,600.0,BY CASH 16,09:37,20.0,2023-12-16 09:37:00
|
||||
6S,94,400.0,BY CASH 17,09:37,10.0,2023-12-16 09:37:00
|
||||
6U,89,400.0,BY CASH 18,09:37,10.0,2023-12-16 09:37:00
|
||||
8D,107,400.0,BY CASH 19,09:37,10.0,2023-12-16 09:37:00
|
||||
5U,303,600.0,BY CASH 20,09:37,20.0,2023-12-16 09:37:00
|
||||
7H,533,1200.0,BY CASH 21,09:45,60.0,2023-12-16 09:45:00
|
||||
8D,668,400.0,BY CASH 22,09:47,10.0,2023-12-16 09:47:00
|
||||
L12,305,600.0,BY CASH 23,09:52,30.0,2023-12-16 09:52:00
|
||||
8C,460,400.0,BY CASH 24,09:52,10.0,2023-12-16 09:52:00
|
||||
6T,203,400.0,BY CASH 25,09:52,10.0,2023-12-16 09:52:00
|
||||
6R,292,400.0,BY CASH 26,09:52,10.0,2023-12-16 09:52:00
|
||||
7L,306,600.0,BY CASH 27,10:11,20.0,2023-12-16 10:11:00
|
||||
7L,514,600.0,BY CASH 28,10:11,20.0,2023-12-16 10:11:00
|
||||
8C,623,400.0,BY CASH 29,10:11,10.0,2023-12-16 10:11:00
|
||||
8C,581,400.0,BY CASH 30,10:11,10.0,2023-12-16 10:11:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:12,0,2023-12-16 10:12:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:12,0,2023-12-16 10:12:00
|
||||
8D,366,400.0,BY CASH 31,10:12,10.0,2023-12-16 10:12:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:13,0,2023-12-16 10:13:00
|
||||
7K,548,1200.0,BY CASH 32,10:13,60.0,2023-12-16 10:13:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:13,0,2023-12-16 10:13:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:14,0,2023-12-16 10:14:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP.,10:14,0,2023-12-16 10:14:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:16,0,2023-12-16 10:16:00
|
||||
6X,171,300.0,BY CASH 33,10:16,10.0,2023-12-16 10:16:00
|
||||
6X,170,300.0,BY CASH 34,10:16,10.0,2023-12-16 10:16:00
|
||||
6M,201,300.0,BY CASH 35,10:16,10.0,2023-12-16 10:16:00
|
||||
6M,68,300.0,BY CASH 36,10:16,10.0,2023-12-16 10:16:00
|
||||
6X,362,300.0,BY CASH 37,10:16,10.0,2023-12-16 10:16:00
|
||||
7J,239,4525.0,TR. VISH.VRUD.-7J EXP. P,10:16,0,2023-12-16 10:16:00
|
||||
8D,613,400.0,BY CASH 38,10:16,10.0,2023-12-16 10:16:00
|
||||
6N,219,400.0,BY CASH 39,10:16,10.0,2023-12-16 10:16:00
|
||||
7J,239,-44000.0,TO TR. SBI,10:16,0,2023-12-16 10:16:00
|
||||
8C,425,400.0,BY CASH 40,10:19,10.0,2023-12-16 10:19:00
|
||||
6E,324,-4500.0,TO CASH SELF,10:20,0,2023-12-16 10:20:00
|
||||
6E,363,1300.0,BY TR. PAYBLE EXP.,10:20,0,2023-12-16 10:20:00
|
||||
6E,363,-22000.0,TO TR. BOB,10:21,0,2023-12-16 10:21:00
|
||||
5Z,480,600.0,BY CASH 41,10:27,20.0,2023-12-16 10:27:00
|
||||
5T,303,600.0,BY CASH 42,10:27,20.0,2023-12-16 10:27:00
|
||||
6X,505,300.0,BY CASH 43,10:27,10.0,2023-12-16 10:27:00
|
||||
6E,371,400.0,BY TR. MOTIVATION EXP.,10:32,0,2023-12-16 10:32:00
|
||||
6E,371,1300.0,BY TR. PAYBLE EXP.,10:32,0,2023-12-16 10:32:00
|
||||
6E,371,-6005.0,TO TR. NL/3738-MUKESHBHA,10:32,0,2023-12-16 10:32:00
|
||||
NL,3738,6005.0,TR. 6E/371 RATHOD MUKESH,10:32,0,2023-12-16 10:32:00
|
||||
6E,371,-16395.0,TO TR. BOB,10:32,0,2023-12-16 10:32:00
|
||||
6N,171,400.0,BY CASH 44,10:41,10.0,2023-12-16 10:41:00
|
||||
7A,485,1200.0,BY CASH 45,10:42,60.0,2023-12-16 10:42:00
|
||||
7A,484,1200.0,BY CASH 46,10:42,60.0,2023-12-16 10:42:00
|
||||
7M,370,600.0,BY CASH 47,10:43,20.0,2023-12-16 10:43:00
|
||||
7M,394,600.0,BY CASH 48,10:43,20.0,2023-12-16 10:43:00
|
||||
6U,488,400.0,BY CASH 49,10:47,10.0,2023-12-16 10:47:00
|
||||
7H,366,600.0,BY CASH 50,10:47,20.0,2023-12-16 10:47:00
|
||||
5W,145,1200.0,BY CASH 51,10:49,60.0,2023-12-16 10:49:00
|
||||
5X,382,600.0,BY CASH 52,10:53,20.0,2023-12-16 10:53:00
|
||||
7E,402,600.0,BY CASH 53,10:53,20.0,2023-12-16 10:53:00
|
||||
8D,112,400.0,BY CASH 54,10:53,10.0,2023-12-16 10:53:00
|
||||
6U,465,800.0,BY CASH 55,11:03,30.0,2023-12-16 11:03:00
|
||||
7A,196,1200.0,BY CASH 56,11:03,60.0,2023-12-16 11:03:00
|
||||
6Q,482,400.0,BY CASH 57,11:04,10.0,2023-12-16 11:04:00
|
||||
6E,328,-15600.0,TO CASH SELF,11:12,0,2023-12-16 11:12:00
|
||||
6E,340,-4500.0,TO CASH SELF,11:12,0,2023-12-16 11:12:00
|
||||
5X,396,600.0,BY CASH 58,11:13,20.0,2023-12-16 11:13:00
|
||||
7E,250,600.0,BY CASH 59,11:14,20.0,2023-12-16 11:14:00
|
||||
8B,572,400.0,BY CASH 60,11:17,10.0,2023-12-16 11:17:00
|
||||
6S,43,400.0,BY CASH 61,11:17,10.0,2023-12-16 11:17:00
|
||||
8D,683,400.0,BY CASH 62,11:17,10.0,2023-12-16 11:17:00
|
||||
8D,667,400.0,BY CASH 63,11:17,10.0,2023-12-16 11:17:00
|
||||
5Z,403,600.0,BY CASH 64,11:17,20.0,2023-12-16 11:17:00
|
||||
6K,429,300.0,BY CASH 65,11:17,10.0,2023-12-16 11:17:00
|
||||
6V,221,400.0,BY CASH 66,11:17,10.0,2023-12-16 11:17:00
|
||||
6Q,286,400.0,BY CASH 67,11:17,10.0,2023-12-16 11:17:00
|
||||
8D,74,400.0,BY CASH 68,11:17,10.0,2023-12-16 11:17:00
|
||||
6M,244,300.0,BY CASH 69,11:17,10.0,2023-12-16 11:17:00
|
||||
L12,216,200.0,BY CASH 70,11:17,5.0,2023-12-16 11:17:00
|
||||
6X,234,300.0,BY CASH 71,11:17,10.0,2023-12-16 11:17:00
|
||||
6X,288,300.0,BY CASH 72,11:17,10.0,2023-12-16 11:17:00
|
||||
6X,277,300.0,BY CASH 73,11:18,10.0,2023-12-16 11:18:00
|
||||
8D,161,400.0,BY CASH 74,11:20,10.0,2023-12-16 11:20:00
|
||||
6S,445,400.0,BY CASH 75,11:20,10.0,2023-12-16 11:20:00
|
||||
6Z,318,400.0,BY CASH 76,11:20,10.0,2023-12-16 11:20:00
|
||||
6K,205,300.0,BY CASH 77,11:21,10.0,2023-12-16 11:21:00
|
||||
6E,126,400.0,BY TR. MOTIVATION EXP.,11:23,0,2023-12-16 11:23:00
|
||||
6E,126,1300.0,BY TR. PAYBLE EXP.,11:23,0,2023-12-16 11:23:00
|
||||
6E,126,-22400.0,TO TR. AU BANK,11:23,0,2023-12-16 11:23:00
|
||||
6E,355,400.0,BY TR. MOTIVATION EXP.,11:23,0,2023-12-16 11:23:00
|
||||
6E,355,1300.0,BY TR. PAYBLE EXP.,11:23,0,2023-12-16 11:23:00
|
||||
6E,355,-22400.0,TO TR. BOB,11:23,0,2023-12-16 11:23:00
|
||||
6E,359,400.0,BY TR. MOTIVATION EXP.,11:24,0,2023-12-16 11:24:00
|
||||
6E,359,1300.0,BY TR. PAYBLE EXP.,11:24,0,2023-12-16 11:24:00
|
||||
6E,359,-22400.0,TO TR. BOB,11:24,0,2023-12-16 11:24:00
|
||||
6E,317,400.0,BY TR. MOTIVATION EXP.,11:24,0,2023-12-16 11:24:00
|
||||
6E,317,1300.0,BY TR. PAYBLE EXP.,11:24,0,2023-12-16 11:24:00
|
||||
5Q,166,600.0,BY CASH 78,11:24,20.0,2023-12-16 11:24:00
|
||||
5W,88,600.0,BY CASH 79,11:24,20.0,2023-12-16 11:24:00
|
||||
6E,317,-22400.0,TO TR. BOB,11:24,0,2023-12-16 11:24:00
|
||||
6E,28,400.0,BY TR. MOTIVATION EXP.,11:25,0,2023-12-16 11:25:00
|
||||
6E,28,1300.0,BY TR. PAYBLE EXP.,11:25,0,2023-12-16 11:25:00
|
||||
6E,28,-22400.0,TO TR. SBI,11:25,0,2023-12-16 11:25:00
|
||||
6E,451,1300.0,BY TR. PAYBLE EXP.,11:25,0,2023-12-16 11:25:00
|
||||
6E,451,-22000.0,TO TR. SBI,11:25,0,2023-12-16 11:25:00
|
||||
6E,367,1300.0,BY TR. PAYBLE EXP.,11:25,0,2023-12-16 11:25:00
|
||||
6E,367,-22000.0,TO TR. SBI,11:26,0,2023-12-16 11:26:00
|
||||
6O,2,400.0,BY CASH 80,11:27,10.0,2023-12-16 11:27:00
|
||||
8B,29,400.0,BY CASH 81,11:27,10.0,2023-12-16 11:27:00
|
||||
6O,3,400.0,BY CASH 82,11:27,10.0,2023-12-16 11:27:00
|
||||
8A,40,400.0,BY CASH 83,11:27,10.0,2023-12-16 11:27:00
|
||||
5Q,2,600.0,BY CASH 84,11:27,20.0,2023-12-16 11:27:00
|
||||
5Q,1,600.0,BY CASH 85,11:27,20.0,2023-12-16 11:27:00
|
||||
5U,120,600.0,BY CASH 86,11:27,20.0,2023-12-16 11:27:00
|
||||
7L,191,600.0,BY CASH 87,11:28,20.0,2023-12-16 11:28:00
|
||||
6E,95,1300.0,BY TR. PAYBLE EXP.,11:32,0,2023-12-16 11:32:00
|
||||
6E,95,-22000.0,TO TR. UNION BANK,11:32,0,2023-12-16 11:32:00
|
||||
6G,400,300.0,BY CASH 88,11:37,10.0,2023-12-16 11:37:00
|
||||
7L,612,600.0,BY CASH 89,11:37,20.0,2023-12-16 11:37:00
|
||||
5V,405,1200.0,BY CASH 90,11:44,60.0,2023-12-16 11:44:00
|
||||
7G,346,600.0,BY CASH 91,11:44,20.0,2023-12-16 11:44:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:45,0,2023-12-16 11:45:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:46,0,2023-12-16 11:46:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:47,0,2023-12-16 11:47:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:47,0,2023-12-16 11:47:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:48,0,2023-12-16 11:48:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:48,0,2023-12-16 11:48:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP.,11:49,0,2023-12-16 11:49:00
|
||||
7M,398,4622.0,TR. VISH.VRUD.-7M EXP. P,11:50,0,2023-12-16 11:50:00
|
||||
7M,398,4624.0,TR. VISH.VRUD.-7M EXP. P,11:50,0,2023-12-16 11:50:00
|
||||
7M,398,-44000.0,TO TR. CBI,11:51,0,2023-12-16 11:51:00
|
||||
5P,352,600.0,BY CASH 92,11:54,20.0,2023-12-16 11:54:00
|
||||
5P,221,600.0,BY CASH 93,11:54,20.0,2023-12-16 11:54:00
|
||||
6S,425,3933.0,TR. VISH.VRUD.-6S EXP. P,11:56,0,2023-12-16 11:56:00
|
||||
6S,425,3933.0,TR. VISH.VRUD.-6S EXP. P,11:57,0,2023-12-16 11:57:00
|
||||
6S,425,3934.0,TR. VISH.VRUD.-6S EXP. P,11:58,0,2023-12-16 11:58:00
|
||||
6S,425,-29400.0,TO TR. AU BANK,11:59,0,2023-12-16 11:59:00
|
||||
8A,206,4200.0,TR. VISH.VRUD.-8A EXP. T,12:01,0,2023-12-16 12:01:00
|
||||
8A,206,4200.0,TR. VISH.VRUD.-8A EXP. T,12:01,0,2023-12-16 12:01:00
|
||||
8A,206,4200.0,TR. VISH.VRUD.-8A EXP. T,12:02,0,2023-12-16 12:02:00
|
||||
8A,206,4200.0,TR. VISH.VRUD.-8A EXP. T,12:03,0,2023-12-16 12:03:00
|
||||
7I,129,1200.0,BY CASH 94,12:03,60.0,2023-12-16 12:03:00
|
||||
8A,206,4200.0,TR. VISH.VRUD.-8A EXP. T,12:03,0,2023-12-16 12:03:00
|
||||
8A,206,-29000.0,TO TR. AU BANK,12:05,0,2023-12-16 12:05:00
|
||||
6E,270,400.0,BY TR. MOTIVATION EXP.,12:06,0,2023-12-16 12:06:00
|
||||
6E,270,1300.0,BY TR. PAYBLE EXP.,12:06,0,2023-12-16 12:06:00
|
||||
6E,270,-22400.0,TO TR. AU BANK,12:06,0,2023-12-16 12:06:00
|
||||
5Q,314,600.0,BY CASH 95,12:07,20.0,2023-12-16 12:07:00
|
||||
5P,347,1200.0,BY CASH 96,12:10,0,2023-12-16 12:10:00
|
||||
7L,103,600.0,BY CASH 97,12:11,20.0,2023-12-16 12:11:00
|
||||
8C,171,400.0,BY CASH 98,12:11,10.0,2023-12-16 12:11:00
|
||||
8A,231,400.0,BY CASH 99,12:17,20.0,2023-12-16 12:17:00
|
||||
8B,585,400.0,BY CASH 100,12:25,10.0,2023-12-16 12:25:00
|
||||
L12,58,200.0,BY CASH 101,12:37,5.0,2023-12-16 12:37:00
|
||||
7G,674,600.0,BY CASH 102,12:37,20.0,2023-12-16 12:37:00
|
||||
7G,549,600.0,BY CASH 103,12:37,20.0,2023-12-16 12:37:00
|
||||
7L,44,600.0,BY CASH 104,12:37,20.0,2023-12-16 12:37:00
|
||||
7L,42,600.0,BY CASH 105,12:37,20.0,2023-12-16 12:37:00
|
||||
7L,610,600.0,BY CASH 106,12:37,20.0,2023-12-16 12:37:00
|
||||
7L,43,600.0,BY CASH 107,12:38,20.0,2023-12-16 12:38:00
|
||||
7L,45,600.0,BY CASH 108,12:38,20.0,2023-12-16 12:38:00
|
||||
7L,611,600.0,BY CASH 109,12:38,20.0,2023-12-16 12:38:00
|
||||
6H,442,400.0,BY CASH 110,12:38,10.0,2023-12-16 12:38:00
|
||||
8D,554,400.0,BY CASH 111,12:38,10.0,2023-12-16 12:38:00
|
||||
8C,21,400.0,BY CASH 112,12:38,10.0,2023-12-16 12:38:00
|
||||
6H,441,400.0,BY CASH 113,12:38,10.0,2023-12-16 12:38:00
|
||||
8D,553,400.0,BY CASH 114,12:38,10.0,2023-12-16 12:38:00
|
||||
8C,22,400.0,BY CASH 115,12:38,10.0,2023-12-16 12:38:00
|
||||
6Y,486,400.0,BY CASH 116,12:39,10.0,2023-12-16 12:39:00
|
||||
7J,681,600.0,BY CASH 117,12:40,20.0,2023-12-16 12:40:00
|
||||
7J,105,600.0,BY CASH 118,12:45,20.0,2023-12-16 12:45:00
|
||||
7J,104,600.0,BY CASH 119,12:45,20.0,2023-12-16 12:45:00
|
||||
7G,115,600.0,BY CASH 120,12:45,20.0,2023-12-16 12:45:00
|
||||
6H,474,800.0,BY CASH 121,12:46,10.0,2023-12-16 12:46:00
|
||||
6I,297,400.0,BY CASH 122,12:47,10.0,2023-12-16 12:47:00
|
||||
5Q,466,600.0,BY CASH 123,12:47,20.0,2023-12-16 12:47:00
|
||||
6T,494,400.0,BY CASH 124,12:48,10.0,2023-12-16 12:48:00
|
||||
6T,546,400.0,BY CASH 125,12:48,10.0,2023-12-16 12:48:00
|
||||
6T,519,400.0,BY CASH 126,12:49,10.0,2023-12-16 12:49:00
|
||||
8D,244,400.0,BY CASH 127,12:49,10.0,2023-12-16 12:49:00
|
||||
5Z,244,600.0,BY CASH 128,12:49,20.0,2023-12-16 12:49:00
|
||||
7F,102,1200.0,BY CASH 129,12:51,60.0,2023-12-16 12:51:00
|
||||
5Z,15,6600.0,BY CASH 130,12:56,1320.0,2023-12-16 12:56:00
|
||||
6I,282,400.0,BY CASH 131,12:57,10.0,2023-12-16 12:57:00
|
||||
6L,373,400.0,BY CASH 132,12:57,10.0,2023-12-16 12:57:00
|
||||
6Q,36,400.0,BY CASH 133,12:57,10.0,2023-12-16 12:57:00
|
||||
7I,451,600.0,BY CASH 134,12:57,20.0,2023-12-16 12:57:00
|
||||
6V,348,400.0,BY CASH 135,13:01,10.0,2023-12-16 13:01:00
|
||||
5O,193,600.0,BY CASH 136,13:02,20.0,2023-12-16 13:02:00
|
||||
5O,400,600.0,BY CASH 137,13:02,20.0,2023-12-16 13:02:00
|
||||
5P,202,600.0,BY CASH 138,13:02,20.0,2023-12-16 13:02:00
|
||||
8A,613,400.0,BY CASH 139,13:02,10.0,2023-12-16 13:02:00
|
||||
6H,112,400.0,BY CASH 140,13:02,10.0,2023-12-16 13:02:00
|
||||
6E,375,400.0,BY TR. MOTIVATION EXP.,13:02,0,2023-12-16 13:02:00
|
||||
6E,375,1300.0,BY TR. PAYBLE EXP.,13:02,0,2023-12-16 13:02:00
|
||||
L11,129,200.0,BY CASH 141,13:02,5.0,2023-12-16 13:02:00
|
||||
L11,413,200.0,BY CASH 142,13:02,5.0,2023-12-16 13:02:00
|
||||
6E,375,-22400.0,TO TR. CBI,13:02,0,2023-12-16 13:02:00
|
||||
6T,76,400.0,BY CASH 143,13:03,10.0,2023-12-16 13:03:00
|
||||
7K,28,600.0,BY CASH 144,13:05,20.0,2023-12-16 13:05:00
|
||||
5P,324,1800.0,BY CASH 145,13:06,120.0,2023-12-16 13:06:00
|
||||
6Y,102,1600.0,BY CASH 146,13:08,100.0,2023-12-16 13:08:00
|
||||
6I,409,1600.0,BY CASH 147,13:08,100.0,2023-12-16 13:08:00
|
||||
5W,288,1200.0,BY CASH 148,13:08,60.0,2023-12-16 13:08:00
|
||||
7D,576,4233.0,TR. VISH.VRUD.-7D EXP. P,13:08,0,2023-12-16 13:08:00
|
||||
7E,404,600.0,BY CASH 149,13:09,40.0,2023-12-16 13:09:00
|
||||
7D,576,4233.0,TR. VISH.VRUD.-7D EXP. P,13:12,0,2023-12-16 13:12:00
|
||||
7D,576,4233.0,TR. VISH.VRUD.-7D EXP. P,13:12,0,2023-12-16 13:12:00
|
||||
7D,576,4233.0,TR. VISH.VRUD.-7D EXP. P,13:13,0,2023-12-16 13:13:00
|
||||
7D,576,4233.0,TR. VISH.VRUD.-7D EXP. P,13:13,0,2023-12-16 13:13:00
|
||||
7D,576,4235.0,TR. VISH.VRUD.-7D EXP. P,13:14,0,2023-12-16 13:14:00
|
||||
7D,576,-44000.0,TO TR. AU BANK,13:14,0,2023-12-16 13:14:00
|
||||
6G,18,300.0,BY CASH 150,13:15,0,2023-12-16 13:15:00
|
||||
6V,26,400.0,BY CASH 151,13:16,10.0,2023-12-16 13:16:00
|
||||
6Y,248,400.0,BY CASH 152,13:17,10.0,2023-12-16 13:17:00
|
||||
6Y,333,400.0,BY CASH 153,13:17,10.0,2023-12-16 13:17:00
|
||||
7C,364,600.0,BY CASH 154,13:34,20.0,2023-12-16 13:34:00
|
||||
7F,163,600.0,BY CASH 155,13:35,20.0,2023-12-16 13:35:00
|
||||
8D,117,400.0,BY CASH 156,13:35,10.0,2023-12-16 13:35:00
|
||||
7J,109,600.0,BY CASH 157,13:37,20.0,2023-12-16 13:37:00
|
||||
7B,420,600.0,BY CASH 158,13:37,20.0,2023-12-16 13:37:00
|
||||
7J,109,600.0,BY CASH 159,13:37,0,2023-12-16 13:37:00
|
||||
7B,420,600.0,BY CASH 160,13:37,0,2023-12-16 13:37:00
|
||||
L13,353,200.0,BY CASH 161,13:39,5.0,2023-12-16 13:39:00
|
||||
7K,326,600.0,BY CASH 162,13:39,20.0,2023-12-16 13:39:00
|
||||
5Y,392,600.0,BY CASH 163,13:40,20.0,2023-12-16 13:40:00
|
||||
7G,211,600.0,BY CASH 164,13:40,20.0,2023-12-16 13:40:00
|
||||
5W,461,600.0,BY CASH 165,13:40,20.0,2023-12-16 13:40:00
|
||||
5W,362,600.0,BY CASH 166,13:40,20.0,2023-12-16 13:40:00
|
||||
5T,328,1200.0,BY CASH 167,13:42,60.0,2023-12-16 13:42:00
|
||||
7J,363,600.0,BY CASH 168,13:55,20.0,2023-12-16 13:55:00
|
||||
7J,506,600.0,BY CASH 169,13:55,20.0,2023-12-16 13:55:00
|
||||
6L,259,400.0,BY CASH 170,13:57,10.0,2023-12-16 13:57:00
|
||||
6L,309,400.0,BY CASH 171,13:57,10.0,2023-12-16 13:57:00
|
||||
CA,586,-1800.0,TO CASH SELF,14:04,0,2023-12-16 14:04:00
|
||||
6E,356,1300.0,BY TR. PAYBLE EXP.,14:20,0,2023-12-16 14:20:00
|
||||
6E,356,-15305.0,TO TR. NL\2940-RATHOD AM,14:21,0,2023-12-16 14:21:00
|
||||
NL,2940,15305.0,TR.6E/356 RATHOD AMITBHA,14:21,0,2023-12-16 14:21:00
|
||||
6E,356,-6695.0,TO CASH SELF,14:21,0,2023-12-16 14:21:00
|
||||
5U,307,600.0,BY CASH 172,14:24,20.0,2023-12-16 14:24:00
|
||||
5U,78,600.0,BY CASH 173,14:24,20.0,2023-12-16 14:24:00
|
||||
8D,45,400.0,BY CASH 174,14:28,10.0,2023-12-16 14:28:00
|
||||
NL,3952,-10000.0,TO CASH LOAN,15:28,0,2023-12-16 15:28:00
|
||||
NL,3738,-330.0,TO TR. INTEREST ->,15:28,0,2023-12-16 15:28:00
|
||||
NL,3738,-675.0,TO CASH LOAN,15:28,0,2023-12-16 15:28:00
|
||||
NL,3617,-651.0,TO TR. INTEREST ->,15:28,0,2023-12-16 15:28:00
|
||||
NL,3617,30651.0,BY CASH 5001,15:29,0,2023-12-16 15:29:00
|
||||
NL,2357,-39.0,TO TR. INTEREST ->,15:29,0,2023-12-16 15:29:00
|
||||
NL,2357,620.0,BY CASH 5002,15:29,0,2023-12-16 15:29:00
|
||||
NL,1634,-29.0,TO TR. INTEREST ->,15:29,0,2023-12-16 15:29:00
|
||||
NL,1634,-2971.0,TO CASH LOAN,15:29,0,2023-12-16 15:29:00
|
||||
NL,3951,-15000.0,TO CASH LOAN,15:30,0,2023-12-16 15:30:00
|
||||
NL,3619,-155.0,TO TR. INTEREST ->,15:31,0,2023-12-16 15:31:00
|
||||
NL,3619,10155.0,BY CASH 5003,15:31,0,2023-12-16 15:31:00
|
||||
NL,3856,-112.0,TO TR. INTEREST ->,15:31,0,2023-12-16 15:31:00
|
||||
NL,3856,5112.0,BY CASH 5004,15:31,0,2023-12-16 15:31:00
|
||||
NL,3334,-375.0,TO TR. INTEREST ->,15:32,0,2023-12-16 15:32:00
|
||||
NL,3334,1375.0,BY CASH 5005,15:32,0,2023-12-16 15:32:00
|
||||
NL,89,-15.0,TO TR. INTEREST ->,15:32,0,2023-12-16 15:32:00
|
||||
NL,89,15.0,BY CASH 5006,15:32,0,2023-12-16 15:32:00
|
||||
NL,3682,-110.0,TO TR. INTEREST ->,15:32,0,2023-12-16 15:32:00
|
||||
NL,3682,110.0,BY CASH 5007,15:32,0,2023-12-16 15:32:00
|
||||
NL,2940,-3464.0,TO TR. INTEREST ->,15:32,0,2023-12-16 15:32:00
|
||||
NL,2940,-6841.0,TO CASH LOAN,15:33,0,2023-12-16 15:33:00
|
|
Loading…
Reference in New Issue