0209review: firm risk component + 2way interaction on n_max_trial and cap_limit_level

This commit is contained in:
Ethan Hao
2026-02-09 20:54:52 -08:00
parent 208b36114f
commit 492e5db7f2
26 changed files with 2233 additions and 57 deletions

3
.gitignore vendored
View File

@@ -1 +1,4 @@
.idea/ .idea/
venv
# intermediate file for analysis_firm_risk_component.ipynb (2GB)
count_with_gfirm.csv

2
.vscode/launch.json vendored
View File

@@ -8,7 +8,7 @@
"name": "Python: Current File", "name": "Python: Current File",
"type": "python", "type": "python",
"request": "launch", "request": "launch",
"program": "C:\\Users\\ASUS\\OneDrive\\Project\\ScrAbm\\Dissertation\\IIabm\\main.py", "program": "${workspaceFolder}\\main.py",
"console": "integratedTerminal", "console": "integratedTerminal",
"justMyCode": true, "justMyCode": true,
"args": [ "args": [

View File

@@ -0,0 +1,43 @@
-- SQL script to duplicate without_exp tables from iiabmdb20260127 to iiabmdb.
-- This allows reusing previous experiment results as the baseline for new experiments.
--
-- Notes:
--   * Tables are dropped in reverse dependency order (result -> sample -> experiment)
--     so any foreign-key references are removed before their parents.
--   * CREATE TABLE ... LIKE copies the full column/index definition, so the
--     column-less INSERT ... SELECT * below is safe: source and destination
--     structures are guaranteed identical.

-- Drop existing tables if they exist (optional - comment out if you want to keep existing data)
DROP TABLE IF EXISTS iiabmdb.without_exp_result;
DROP TABLE IF EXISTS iiabmdb.without_exp_sample;
DROP TABLE IF EXISTS iiabmdb.without_exp_experiment;

-- Create and copy without_exp_experiment table
CREATE TABLE iiabmdb.without_exp_experiment LIKE iiabmdb20260127.without_exp_experiment;
INSERT INTO iiabmdb.without_exp_experiment
SELECT *
FROM iiabmdb20260127.without_exp_experiment;

-- Create and copy without_exp_sample table
CREATE TABLE iiabmdb.without_exp_sample LIKE iiabmdb20260127.without_exp_sample;
INSERT INTO iiabmdb.without_exp_sample
SELECT *
FROM iiabmdb20260127.without_exp_sample;

-- Create and copy without_exp_result table
CREATE TABLE iiabmdb.without_exp_result LIKE iiabmdb20260127.without_exp_result;
INSERT INTO iiabmdb.without_exp_result
SELECT *
FROM iiabmdb20260127.without_exp_result;

-- Verify the copy: each destination count must equal its source count.
-- (Counting only the destination, as before, could not detect a partial copy.)
SELECT
    'without_exp_experiment' AS table_name,
    (SELECT COUNT(*) FROM iiabmdb.without_exp_experiment) AS copied_rows,
    (SELECT COUNT(*) FROM iiabmdb20260127.without_exp_experiment) AS source_rows
UNION ALL
SELECT
    'without_exp_sample',
    (SELECT COUNT(*) FROM iiabmdb.without_exp_sample),
    (SELECT COUNT(*) FROM iiabmdb20260127.without_exp_sample)
UNION ALL
SELECT
    'without_exp_result',
    (SELECT COUNT(*) FROM iiabmdb.without_exp_result),
    (SELECT COUNT(*) FROM iiabmdb20260127.without_exp_result);

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,10 @@
idx_scenario,n_max_trial,prf_size,prf_conn,cap_limit_prob_type,cap_limit_level,diff_new_conn,remove_t,netw_prf_n,mean_count_firm_prod,mean_count_firm,mean_count_prod,mean_max_ts_firm_prod,mean_max_ts_firm,mean_max_ts_prod,mean_n_remove_firm_prod,mean_n_all_prod_remove_firm,mean_end_ts
0,7,1,1,uniform,5.0000,0.5000,5,2,2.6036,2.6036,2.0808,1.0808,1.0808,1.0808,0.6044,0.2131,1.8726
1,7,1,1,uniform,10.0000,0.5000,5,2,2.6817,2.6798,2.1509,1.1509,1.1509,1.1509,0.8539,0.2994,2.2829
2,7,1,1,uniform,15.0000,0.5000,5,2,2.7181,2.7154,2.1827,1.1827,1.1827,1.1827,1.0015,0.3398,2.5124
3,5,1,1,uniform,5.0000,0.5000,5,2,2.6036,2.6036,2.0808,1.0808,1.0808,1.0808,0.6044,0.2131,1.8726
4,5,1,1,uniform,10.0000,0.5000,5,2,2.6832,2.6813,2.1518,1.1518,1.1518,1.1518,0.8543,0.2996,2.2840
5,5,1,1,uniform,15.0000,0.5000,5,2,2.7196,2.7168,2.1838,1.1838,1.1838,1.1838,1.0046,0.3406,2.5189
6,3,1,1,uniform,5.0000,0.5000,5,2,2.6042,2.6042,2.0815,1.0815,1.0815,1.0815,0.6046,0.2131,1.8754
7,3,1,1,uniform,10.0000,0.5000,5,2,2.6891,2.6872,2.1575,1.1575,1.1575,1.1575,0.8674,0.3057,2.3349
8,3,1,1,uniform,15.0000,0.5000,5,2,2.7280,2.7253,2.1928,1.1928,1.1928,1.1928,1.0196,0.3469,2.5918
1 idx_scenario n_max_trial prf_size prf_conn cap_limit_prob_type cap_limit_level diff_new_conn remove_t netw_prf_n mean_count_firm_prod mean_count_firm mean_count_prod mean_max_ts_firm_prod mean_max_ts_firm mean_max_ts_prod mean_n_remove_firm_prod mean_n_all_prod_remove_firm mean_end_ts
2 0 7 1 1 uniform 5.0000 0.5000 5 2 2.6036 2.6036 2.0808 1.0808 1.0808 1.0808 0.6044 0.2131 1.8726
3 1 7 1 1 uniform 10.0000 0.5000 5 2 2.6817 2.6798 2.1509 1.1509 1.1509 1.1509 0.8539 0.2994 2.2829
4 2 7 1 1 uniform 15.0000 0.5000 5 2 2.7181 2.7154 2.1827 1.1827 1.1827 1.1827 1.0015 0.3398 2.5124
5 3 5 1 1 uniform 5.0000 0.5000 5 2 2.6036 2.6036 2.0808 1.0808 1.0808 1.0808 0.6044 0.2131 1.8726
6 4 5 1 1 uniform 10.0000 0.5000 5 2 2.6832 2.6813 2.1518 1.1518 1.1518 1.1518 0.8543 0.2996 2.2840
7 5 5 1 1 uniform 15.0000 0.5000 5 2 2.7196 2.7168 2.1838 1.1838 1.1838 1.1838 1.0046 0.3406 2.5189
8 6 3 1 1 uniform 5.0000 0.5000 5 2 2.6042 2.6042 2.0815 1.0815 1.0815 1.0815 0.6046 0.2131 1.8754
9 7 3 1 1 uniform 10.0000 0.5000 5 2 2.6891 2.6872 2.1575 1.1575 1.1575 1.1575 0.8674 0.3057 2.3349
10 8 3 1 1 uniform 15.0000 0.5000 5 2 2.7280 2.7253 2.1928 1.1928 1.1928 1.1928 1.0196 0.3469 2.5918

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,5 @@
from orm import engine from orm import engine
from sqlalchemy import text
import pandas as pd import pandas as pd
import networkx as nx import networkx as nx
import json import json
@@ -12,8 +13,8 @@ BomNodes = pd.read_csv('BomNodes.csv', index_col=0)
with open('SQL_analysis_risk.sql', 'r') as f: with open('SQL_analysis_risk.sql', 'r') as f:
str_sql = f.read() str_sql = f.read()
result = pd.read_sql(sql=str_sql, with engine.connect() as conn:
con=engine) result = pd.read_sql(sql=text(str_sql), con=conn)
result.to_csv('analysis\\count.csv', result.to_csv('analysis\\count.csv',
index=False, index=False,
encoding='utf-8-sig') encoding='utf-8-sig')
@@ -23,10 +24,11 @@ print(result)
plt.rcParams['font.sans-serif'] = 'SimHei' plt.rcParams['font.sans-serif'] = 'SimHei'
exp_id = 1 exp_id = 1
G_bom_str = pd.read_sql( with engine.connect() as conn:
sql=f'select g_bom from iiabmdb.without_exp_experiment ' G_bom_str = pd.read_sql(
f'where id = {exp_id};', sql=text(f'select g_bom from iiabmdb.without_exp_experiment '
con=engine)['g_bom'].tolist()[0] f'where id = {exp_id};'),
con=conn)['g_bom'].tolist()[0]
G_bom = nx.adjacency_graph(json.loads(G_bom_str)) G_bom = nx.adjacency_graph(json.loads(G_bom_str))
pos = nx.nx_agraph.graphviz_layout(G_bom, prog="twopi", args="") pos = nx.nx_agraph.graphviz_layout(G_bom, prog="twopi", args="")
node_labels = nx.get_node_attributes(G_bom, 'Name') node_labels = nx.get_node_attributes(G_bom, 'Name')
@@ -44,9 +46,10 @@ plt.close()
plt.rcParams['font.sans-serif'] = 'SimHei' plt.rcParams['font.sans-serif'] = 'SimHei'
sample_id = 1 sample_id = 1
G_firm_str = pd.read_sql( with engine.connect() as conn:
sql=f'select g_firm from iiabmdb.without_exp_sample where id = {exp_id};', G_firm_str = pd.read_sql(
con=engine)['g_firm'].tolist()[0] sql=text(f'select g_firm from iiabmdb.without_exp_sample where id = {exp_id};'),
con=conn)['g_firm'].tolist()[0]
G_firm = nx.adjacency_graph(json.loads(G_firm_str)) G_firm = nx.adjacency_graph(json.loads(G_firm_str))
pos = nx.nx_agraph.graphviz_layout(G_firm, prog="twopi", args="") pos = nx.nx_agraph.graphviz_layout(G_firm, prog="twopi", args="")
node_label = nx.get_node_attributes(G_firm, 'Name') node_label = nx.get_node_attributes(G_firm, 'Name')

View File

@@ -2,7 +2,7 @@
"cells": [ "cells": [
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 6, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [
{ {

19
check_db.py Normal file
View File

@@ -0,0 +1,19 @@
from orm import engine
from sqlalchemy import text
import pandas as pd
import networkx as nx
import json
import matplotlib.pyplot as plt
# prep data
Firm = pd.read_csv("Firm_amended.csv")
Firm['Code'] = Firm['Code'].astype('string')
Firm.fillna(0, inplace=True)
BomNodes = pd.read_csv('BomNodes.csv', index_col=0)
with open('SQL_analysis_risk.sql', 'r') as f:
str_sql = f.read()
str_sql = str_sql.replace('iiabmdb', 'iiabmdb20260127')
with engine.connect() as conn:
result = pd.read_sql(sql=text(str_sql), con=conn)
print(result)

View File

@@ -1,5 +1,6 @@
import os import os
import datetime import datetime
import time
from model import Model from model import Model
@@ -21,6 +22,9 @@ class Computation:
# lock this row by update is_done_flag to 0 # lock this row by update is_done_flag to 0
self.c_db.lock_the_sample(sample_random) self.c_db.lock_the_sample(sample_random)
# Record start time
start_time = time.time()
print( print(
f"Pid {self.pid} ({str_code}) is running " f"Pid {self.pid} ({str_code}) is running "
f"sample {sample_random.id} at {datetime.datetime.now()}") f"sample {sample_random.id} at {datetime.datetime.now()}")
@@ -34,6 +38,14 @@ class Computation:
**dct_exp} **dct_exp}
model = Model(dct_sample_para) model = Model(dct_sample_para)
results = model.run(display=False) results = model.run(display=False)
# Calculate elapsed time
elapsed_time = time.time() - start_time
print(
f"Pid {self.pid} ({str_code}) completed "
f"sample {sample_random.id} in {elapsed_time:.2f} seconds "
f"(stopped at t={sample_random.stop_t})")
return False return False

View File

@@ -56,7 +56,8 @@ class ControllerDB:
if self.is_with_exp: if self.is_with_exp:
with open('SQL_export_high_risk_setting.sql', 'r') as f: with open('SQL_export_high_risk_setting.sql', 'r') as f:
str_sql = f.read() str_sql = f.read()
result = pd.read_sql(sql=str_sql, con=engine) with engine.connect() as conn:
result = pd.read_sql(sql=text(str_sql), con=conn)
result['dct_lst_init_disrupt_firm_prod'] = \ result['dct_lst_init_disrupt_firm_prod'] = \
result['dct_lst_init_disrupt_firm_prod'].apply( result['dct_lst_init_disrupt_firm_prod'].apply(
lambda x: pickle.loads(x)) lambda x: pickle.loads(x))
@@ -261,5 +262,8 @@ class ControllerDB:
@staticmethod @staticmethod
def lock_the_sample(sample: Sample): def lock_the_sample(sample: Sample):
sample.is_done_flag, sample.computer_name = 0, platform.node() from datetime import datetime
sample.is_done_flag = 0
sample.computer_name = platform.node()
sample.ts_start = datetime.now()
db_session.commit() db_session.commit()

View File

@@ -447,6 +447,7 @@ class Model(ap.Model):
def end(self): def end(self):
# print('/' * 20, 'output', '/' * 20) # print('/' * 20, 'output', '/' * 20)
from datetime import datetime
qry_result = db_session.query(Result).filter_by(s_id=self.sample.id) qry_result = db_session.query(Result).filter_by(s_id=self.sample.id)
if qry_result.count() == 0: if qry_result.count() == 0:
@@ -468,6 +469,12 @@ class Model(ap.Model):
lst_result_info.append(db_r) lst_result_info.append(db_r)
db_session.bulk_save_objects(lst_result_info) db_session.bulk_save_objects(lst_result_info)
db_session.commit() db_session.commit()
# Calculate elapsed time if ts_start exists
if self.sample.ts_start is not None:
elapsed = (datetime.now() - self.sample.ts_start).total_seconds()
self.sample.elapsed_seconds = elapsed
self.sample.is_done_flag = 1 self.sample.is_done_flag = 1
self.sample.computer_name = platform.node() self.sample.computer_name = platform.node()
self.sample.stop_t = self.int_stop_ts self.sample.stop_t = self.int_stop_ts

View File

@@ -1,37 +1,10 @@
X12,X1,X2,X3,X13,X14,X15,X16,X4,X5,X6,X7,X8,X9,X10,X11,X17,X18,X19,X20,X21,X22,X23 X12,X1,X2,X3,X13,X14,X15,X16
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 0,0,0,0,0,1,1,1
1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1 0,0,0,0,1,1,1,1
2,0,0,0,2,2,2,2,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2 0,0,0,0,2,1,1,1
0,0,0,0,0,0,0,1,0,0,1,1,1,1,1,1,1,1,1,2,2,2,2 1,0,0,0,0,1,1,1
1,0,0,0,1,1,1,2,0,0,1,1,1,1,1,1,2,2,2,0,0,0,0 1,0,0,0,1,1,1,1
2,0,0,0,2,2,2,0,0,0,1,1,1,1,1,1,0,0,0,1,1,1,1 1,0,0,0,2,1,1,1
0,0,0,1,0,1,2,0,1,1,0,0,0,1,1,1,1,2,2,0,1,1,2 2,0,0,0,0,1,1,1
1,0,0,1,1,2,0,1,1,1,0,0,0,1,1,1,2,0,0,1,2,2,0 2,0,0,0,1,1,1,1
2,0,0,1,2,0,1,2,1,1,0,0,0,1,1,1,0,1,1,2,0,0,1 2,0,0,0,2,1,1,1
0,0,1,0,0,2,1,0,1,1,0,1,1,0,0,1,2,1,2,1,0,2,1
1,0,1,0,1,0,2,1,1,1,0,1,1,0,0,1,0,2,0,2,1,0,2
2,0,1,0,2,1,0,2,1,1,0,1,1,0,0,1,1,0,1,0,2,1,0
0,0,1,1,1,2,0,2,0,1,1,0,1,0,1,0,1,0,2,2,1,0,1
1,0,1,1,2,0,1,0,0,1,1,0,1,0,1,0,2,1,0,0,2,1,2
2,0,1,1,0,1,2,1,0,1,1,0,1,0,1,0,0,2,1,1,0,2,0
0,0,1,1,1,2,1,0,1,0,1,1,0,1,0,0,0,2,1,2,2,1,0
1,0,1,1,2,0,2,1,1,0,1,1,0,1,0,0,1,0,2,0,0,2,1
2,0,1,1,0,1,0,2,1,0,1,1,0,1,0,0,2,1,0,1,1,0,2
0,1,0,1,1,0,2,2,1,0,0,1,1,0,1,0,2,0,1,1,0,1,2
1,1,0,1,2,1,0,0,1,0,0,1,1,0,1,0,0,1,2,2,1,2,0
2,1,0,1,0,2,1,1,1,0,0,1,1,0,1,0,1,2,0,0,2,0,1
0,1,0,1,1,1,2,2,0,1,1,1,0,0,0,1,0,1,0,0,2,2,1
1,1,0,1,2,2,0,0,0,1,1,1,0,0,0,1,1,2,1,1,0,0,2
2,1,0,1,0,0,1,1,0,1,1,1,0,0,0,1,2,0,2,2,1,1,0
0,1,0,0,2,1,0,1,1,1,1,0,1,1,0,0,2,2,0,2,0,1,1
1,1,0,0,0,2,1,2,1,1,1,0,1,1,0,0,0,0,1,0,1,2,2
2,1,0,0,1,0,2,0,1,1,1,0,1,1,0,0,1,1,2,1,2,0,0
0,1,1,1,2,1,1,1,0,0,0,0,1,1,0,1,0,0,2,1,2,0,2
1,1,1,1,0,2,2,2,0,0,0,0,1,1,0,1,1,1,0,2,0,1,0
2,1,1,1,1,0,0,0,0,0,0,0,1,1,0,1,2,2,1,0,1,2,1
0,1,1,0,2,2,2,1,1,0,1,0,0,0,1,1,2,1,1,0,1,0,0
1,1,1,0,0,0,0,2,1,0,1,0,0,0,1,1,0,2,2,1,2,1,1
2,1,1,0,1,1,1,0,1,0,1,0,0,0,1,1,1,0,0,2,0,2,2
0,1,1,0,2,0,1,2,0,1,0,1,0,1,1,0,1,2,0,1,1,2,0
1,1,1,0,0,1,2,0,0,1,0,1,0,1,1,0,2,0,1,2,2,0,1
2,1,1,0,1,2,0,1,0,1,0,1,0,1,1,0,0,1,2,0,0,1,2
1 X12 X1 X2 X3 X13 X14 X15 X16 X4 X5 X6 X7 X8 X9 X10 X11 X17 X18 X19 X20 X21 X22 X23
2 0 0 0 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
3 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1
4 2 0 0 0 0 2 2 1 2 1 2 1 0 0 0 0 0 0 0 0 2 2 2 2 2 2 2
5 0 1 0 0 0 0 0 1 0 1 1 0 0 1 1 1 1 1 1 1 1 1 2 2 2 2
6 1 0 0 0 1 1 1 2 1 0 0 1 1 1 1 1 1 2 2 2 0 0 0 0
7 2 1 0 0 0 2 2 1 2 1 0 1 0 0 1 1 1 1 1 1 0 0 0 1 1 1 1
8 0 2 0 0 1 0 0 1 2 1 0 1 1 1 0 0 0 1 1 1 1 2 2 0 1 1 2
9 1 2 0 0 1 0 1 2 1 0 1 1 1 1 0 0 0 1 1 1 2 0 0 1 2 2 0
10 2 0 0 1 0 2 0 1 1 2 1 1 1 0 0 0 1 1 1 0 1 1 2 0 0 1
0 0 1 0 0 2 1 0 1 1 0 1 1 0 0 1 2 1 2 1 0 2 1
1 0 1 0 1 0 2 1 1 1 0 1 1 0 0 1 0 2 0 2 1 0 2
2 0 1 0 2 1 0 2 1 1 0 1 1 0 0 1 1 0 1 0 2 1 0
0 0 1 1 1 2 0 2 0 1 1 0 1 0 1 0 1 0 2 2 1 0 1
1 0 1 1 2 0 1 0 0 1 1 0 1 0 1 0 2 1 0 0 2 1 2
2 0 1 1 0 1 2 1 0 1 1 0 1 0 1 0 0 2 1 1 0 2 0
0 0 1 1 1 2 1 0 1 0 1 1 0 1 0 0 0 2 1 2 2 1 0
1 0 1 1 2 0 2 1 1 0 1 1 0 1 0 0 1 0 2 0 0 2 1
2 0 1 1 0 1 0 2 1 0 1 1 0 1 0 0 2 1 0 1 1 0 2
0 1 0 1 1 0 2 2 1 0 0 1 1 0 1 0 2 0 1 1 0 1 2
1 1 0 1 2 1 0 0 1 0 0 1 1 0 1 0 0 1 2 2 1 2 0
2 1 0 1 0 2 1 1 1 0 0 1 1 0 1 0 1 2 0 0 2 0 1
0 1 0 1 1 1 2 2 0 1 1 1 0 0 0 1 0 1 0 0 2 2 1
1 1 0 1 2 2 0 0 0 1 1 1 0 0 0 1 1 2 1 1 0 0 2
2 1 0 1 0 0 1 1 0 1 1 1 0 0 0 1 2 0 2 2 1 1 0
0 1 0 0 2 1 0 1 1 1 1 0 1 1 0 0 2 2 0 2 0 1 1
1 1 0 0 0 2 1 2 1 1 1 0 1 1 0 0 0 0 1 0 1 2 2
2 1 0 0 1 0 2 0 1 1 1 0 1 1 0 0 1 1 2 1 2 0 0
0 1 1 1 2 1 1 1 0 0 0 0 1 1 0 1 0 0 2 1 2 0 2
1 1 1 1 0 2 2 2 0 0 0 0 1 1 0 1 1 1 0 2 0 1 0
2 1 1 1 1 0 0 0 0 0 0 0 1 1 0 1 2 2 1 0 1 2 1
0 1 1 0 2 2 2 1 1 0 1 0 0 0 1 1 2 1 1 0 1 0 0
1 1 1 0 0 0 0 2 1 0 1 0 0 0 1 1 0 2 2 1 2 1 1
2 1 1 0 1 1 1 0 1 0 1 0 0 0 1 1 1 0 0 2 0 2 2
0 1 1 0 2 0 1 2 0 1 0 1 0 1 1 0 1 2 0 1 1 2 0
1 1 1 0 0 1 2 0 0 1 0 1 0 1 1 0 2 0 1 2 2 0 1
2 1 1 0 1 2 0 1 0 1 0 1 0 1 1 0 0 1 2 0 0 1 2

View File

@@ -0,0 +1,15 @@
X12,X1,X2,X3,X13,X14,X15,X16
0,0,0,0,0,1,1,1
0,0,0,0,1,1,1,1
0,0,0,0,2,1,1,1
1,0,0,0,0,1,1,1
1,0,0,0,1,1,1,1
1,0,0,0,2,1,1,1
2,0,0,0,0,1,1,1
2,0,0,0,1,1,1,1
2,0,0,0,2,1,1,1
,,,,,,,
n_max_trial,prf_size,prf_conn,cap_limit_prob_type,cap_limit_level,diff_new_conn,remove_t,netw_prf_n
15,TRUE,TRUE,uniform,5,0.3,3,3
10,FALSE,FALSE,normal,10,0.5,5,2
5,,,,15,0.7,7,1
1 X12 X1 X2 X3 X13 X14 X15 X16
2 0 0 0 0 0 1 1 1
3 0 0 0 0 1 1 1 1
4 0 0 0 0 2 1 1 1
5 1 0 0 0 0 1 1 1
6 1 0 0 0 1 1 1 1
7 1 0 0 0 2 1 1 1
8 2 0 0 0 0 1 1 1
9 2 0 0 0 1 1 1 1
10 2 0 0 0 2 1 1 1
11
12 n_max_trial prf_size prf_conn cap_limit_prob_type cap_limit_level diff_new_conn remove_t netw_prf_n
13 15 TRUE TRUE uniform 5 0.3 3 3
14 10 FALSE FALSE normal 10 0.5 5 2
15 5 15 0.7 7 1

4
orm.py
View File

@@ -22,7 +22,7 @@ with open('conf_db_prefix.yaml') as file:
db_name_prefix = dct_conf_db_prefix['db_name_prefix'] db_name_prefix = dct_conf_db_prefix['db_name_prefix']
str_login = 'mysql://{}:{}@{}:{}/{}'.format(dct_conf_db['user_name'], str_login = 'mysql+pymysql://{}:{}@{}:{}/{}'.format(dct_conf_db['user_name'],
dct_conf_db['password'], dct_conf_db['password'],
dct_conf_db['address'], dct_conf_db['address'],
dct_conf_db['port'], dct_conf_db['port'],
@@ -82,6 +82,8 @@ class Sample(Base):
is_done_flag = Column(Integer, nullable=False) is_done_flag = Column(Integer, nullable=False)
computer_name = Column(String(64), nullable=True) computer_name = Column(String(64), nullable=True)
ts_done = Column(DateTime(timezone=True), onupdate=func.now()) ts_done = Column(DateTime(timezone=True), onupdate=func.now())
ts_start = Column(DateTime(timezone=True), nullable=True)
elapsed_seconds = Column(DECIMAL(10, 2), nullable=True)
stop_t = Column(Integer, nullable=True) stop_t = Column(Integer, nullable=True)
g_firm = Column(Text(4294000000), nullable=True) g_firm = Column(Text(4294000000), nullable=True)

View File

@@ -1,7 +1,7 @@
agentpy==0.1.5 agentpy==0.1.5
alabaster==0.7.13 alabaster==0.7.13
Babel==2.12.1 Babel==2.12.1
certifi @ file:///C:/b/abs_85o_6fm0se/croot/certifi_1671487778835/work/certifi # certifi @ file:///C:/b/abs_85o_6fm0se/croot/certifi_1671487778835/work/certifi
charset-normalizer==3.0.1 charset-normalizer==3.0.1
colorama==0.4.6 colorama==0.4.6
cycler==0.11.0 cycler==0.11.0
@@ -28,7 +28,7 @@ pandas==1.4.1
pandas-stubs==1.2.0.39 pandas-stubs==1.2.0.39
Pillow==9.4.0 Pillow==9.4.0
Pygments==2.14.0 Pygments==2.14.0
pygraphviz @ file:///C:/Users/ASUS/Downloads/pygraphviz-1.9-cp38-cp38-win_amd64.whl # pygraphviz @ file:///C:/Users/ASUS/Downloads/pygraphviz-1.9-cp38-cp38-win_amd64.whl
pyparsing==3.0.9 pyparsing==3.0.9
python-dateutil==2.8.2 python-dateutil==2.8.2
pytz==2022.7.1 pytz==2022.7.1
@@ -50,5 +50,5 @@ traitlets==5.9.0
typing_extensions==4.5.0 typing_extensions==4.5.0
urllib3==1.26.14 urllib3==1.26.14
wincertstore==0.2 wincertstore==0.2
yapf @ file:///tmp/build/80754af9/yapf_1615749224965/work # yapf @ file:///tmp/build/80754af9/yapf_1615749224965/work
zipp==3.15.0 zipp==3.15.0

View File

@@ -1,4 +1,4 @@
n_max_trial,prf_size,prf_conn,cap_limit_prob_type,cap_limit_level,diff_new_conn,remove_t,netw_prf_n n_max_trial,prf_size,prf_conn,cap_limit_prob_type,cap_limit_level,diff_new_conn,remove_t,netw_prf_n
7,TRUE,TRUE,uniform,5,0.3,3,3 7,TRUE,TRUE,uniform,5,0.3,3,3
5,FALSE,FALSE,normal,10,0.5,5,2 5,FALSE,FALSE,normal,10,0.5,5,2
3,,,,15,0.7,7,1 3,,,,15,0.7,7,1
1 n_max_trial prf_size prf_conn cap_limit_prob_type cap_limit_level diff_new_conn remove_t netw_prf_n
2 7 TRUE TRUE uniform 5 0.3 3 3
3 5 FALSE FALSE normal 10 0.5 5 2
4 3 15 0.7 7 1