reconnect moved files to git repo
This commit is contained in:
8
.idea/.gitignore
generated
vendored
Executable file
8
.idea/.gitignore
generated
vendored
Executable file
@ -0,0 +1,8 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# Editor-based HTTP Client requests
|
||||
/httpRequests/
|
||||
# Datasource local storage ignored files
|
||||
/dataSources/
|
||||
/dataSources.local.xml
|
||||
11
.idea/TimeSeriesAnalysis.iml
generated
Executable file
11
.idea/TimeSeriesAnalysis.iml
generated
Executable file
@ -0,0 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/.venv" />
|
||||
<excludeFolder url="file://$MODULE_DIR$/.venv1" />
|
||||
</content>
|
||||
<orderEntry type="jdk" jdkName="Python 3.13 (TimeSeriesAnalysis)" jdkType="Python SDK" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
||||
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Executable file
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Executable file
@ -0,0 +1,6 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<settings>
|
||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||
<version value="1.0" />
|
||||
</settings>
|
||||
</component>
|
||||
7
.idea/misc.xml
generated
Executable file
7
.idea/misc.xml
generated
Executable file
@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Black">
|
||||
<option name="sdkName" value="Python 3.13 (TimeSeriesAnalysis)" />
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (TimeSeriesAnalysis)" project-jdk-type="Python SDK" />
|
||||
</project>
|
||||
8
.idea/modules.xml
generated
Executable file
8
.idea/modules.xml
generated
Executable file
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/TimeSeriesAnalysis.iml" filepath="$PROJECT_DIR$/.idea/TimeSeriesAnalysis.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
||||
3
README.md
Executable file
3
README.md
Executable file
@ -0,0 +1,3 @@
|
||||
Time Series Analysis
|
||||
|
||||
This is a simple app to analyse time-series data.
|
||||
145
Uploads/airline_passengers.csv
Executable file
145
Uploads/airline_passengers.csv
Executable file
@ -0,0 +1,145 @@
|
||||
"Month","Thousands of Passengers"
|
||||
"1949-01",112
|
||||
"1949-02",118
|
||||
"1949-03",132
|
||||
"1949-04",129
|
||||
"1949-05",121
|
||||
"1949-06",135
|
||||
"1949-07",148
|
||||
"1949-08",148
|
||||
"1949-09",136
|
||||
"1949-10",119
|
||||
"1949-11",104
|
||||
"1949-12",118
|
||||
"1950-01",115
|
||||
"1950-02",126
|
||||
"1950-03",141
|
||||
"1950-04",135
|
||||
"1950-05",125
|
||||
"1950-06",149
|
||||
"1950-07",170
|
||||
"1950-08",170
|
||||
"1950-09",158
|
||||
"1950-10",133
|
||||
"1950-11",114
|
||||
"1950-12",140
|
||||
"1951-01",145
|
||||
"1951-02",150
|
||||
"1951-03",178
|
||||
"1951-04",163
|
||||
"1951-05",172
|
||||
"1951-06",178
|
||||
"1951-07",199
|
||||
"1951-08",199
|
||||
"1951-09",184
|
||||
"1951-10",162
|
||||
"1951-11",146
|
||||
"1951-12",166
|
||||
"1952-01",171
|
||||
"1952-02",180
|
||||
"1952-03",193
|
||||
"1952-04",181
|
||||
"1952-05",183
|
||||
"1952-06",218
|
||||
"1952-07",230
|
||||
"1952-08",242
|
||||
"1952-09",209
|
||||
"1952-10",191
|
||||
"1952-11",172
|
||||
"1952-12",194
|
||||
"1953-01",196
|
||||
"1953-02",196
|
||||
"1953-03",236
|
||||
"1953-04",235
|
||||
"1953-05",229
|
||||
"1953-06",243
|
||||
"1953-07",264
|
||||
"1953-08",272
|
||||
"1953-09",237
|
||||
"1953-10",211
|
||||
"1953-11",180
|
||||
"1953-12",201
|
||||
"1954-01",204
|
||||
"1954-02",188
|
||||
"1954-03",235
|
||||
"1954-04",227
|
||||
"1954-05",234
|
||||
"1954-06",264
|
||||
"1954-07",302
|
||||
"1954-08",293
|
||||
"1954-09",259
|
||||
"1954-10",229
|
||||
"1954-11",203
|
||||
"1954-12",229
|
||||
"1955-01",242
|
||||
"1955-02",233
|
||||
"1955-03",267
|
||||
"1955-04",269
|
||||
"1955-05",270
|
||||
"1955-06",315
|
||||
"1955-07",364
|
||||
"1955-08",347
|
||||
"1955-09",312
|
||||
"1955-10",274
|
||||
"1955-11",237
|
||||
"1955-12",278
|
||||
"1956-01",284
|
||||
"1956-02",277
|
||||
"1956-03",317
|
||||
"1956-04",313
|
||||
"1956-05",318
|
||||
"1956-06",374
|
||||
"1956-07",413
|
||||
"1956-08",405
|
||||
"1956-09",355
|
||||
"1956-10",306
|
||||
"1956-11",271
|
||||
"1956-12",306
|
||||
"1957-01",315
|
||||
"1957-02",301
|
||||
"1957-03",356
|
||||
"1957-04",348
|
||||
"1957-05",355
|
||||
"1957-06",422
|
||||
"1957-07",465
|
||||
"1957-08",467
|
||||
"1957-09",404
|
||||
"1957-10",347
|
||||
"1957-11",305
|
||||
"1957-12",336
|
||||
"1958-01",340
|
||||
"1958-02",318
|
||||
"1958-03",362
|
||||
"1958-04",348
|
||||
"1958-05",363
|
||||
"1958-06",435
|
||||
"1958-07",491
|
||||
"1958-08",505
|
||||
"1958-09",404
|
||||
"1958-10",359
|
||||
"1958-11",310
|
||||
"1958-12",337
|
||||
"1959-01",360
|
||||
"1959-02",342
|
||||
"1959-03",406
|
||||
"1959-04",396
|
||||
"1959-05",420
|
||||
"1959-06",472
|
||||
"1959-07",548
|
||||
"1959-08",559
|
||||
"1959-09",463
|
||||
"1959-10",407
|
||||
"1959-11",362
|
||||
"1959-12",405
|
||||
"1960-01",417
|
||||
"1960-02",391
|
||||
"1960-03",419
|
||||
"1960-04",461
|
||||
"1960-05",472
|
||||
"1960-06",535
|
||||
"1960-07",622
|
||||
"1960-08",606
|
||||
"1960-09",508
|
||||
"1960-10",461
|
||||
"1960-11",390
|
||||
"1960-12",432
|
||||
|
145
Uploads/processed_airline_passengers.csv
Executable file
145
Uploads/processed_airline_passengers.csv
Executable file
@ -0,0 +1,145 @@
|
||||
Month,Thousands of Passengers
|
||||
1949-01-01,112
|
||||
1949-02-01,118
|
||||
1949-03-01,132
|
||||
1949-04-01,129
|
||||
1949-05-01,121
|
||||
1949-06-01,135
|
||||
1949-07-01,148
|
||||
1949-08-01,148
|
||||
1949-09-01,136
|
||||
1949-10-01,119
|
||||
1949-11-01,104
|
||||
1949-12-01,118
|
||||
1950-01-01,115
|
||||
1950-02-01,126
|
||||
1950-03-01,141
|
||||
1950-04-01,135
|
||||
1950-05-01,125
|
||||
1950-06-01,149
|
||||
1950-07-01,170
|
||||
1950-08-01,170
|
||||
1950-09-01,158
|
||||
1950-10-01,133
|
||||
1950-11-01,114
|
||||
1950-12-01,140
|
||||
1951-01-01,145
|
||||
1951-02-01,150
|
||||
1951-03-01,178
|
||||
1951-04-01,163
|
||||
1951-05-01,172
|
||||
1951-06-01,178
|
||||
1951-07-01,199
|
||||
1951-08-01,199
|
||||
1951-09-01,184
|
||||
1951-10-01,162
|
||||
1951-11-01,146
|
||||
1951-12-01,166
|
||||
1952-01-01,171
|
||||
1952-02-01,180
|
||||
1952-03-01,193
|
||||
1952-04-01,181
|
||||
1952-05-01,183
|
||||
1952-06-01,218
|
||||
1952-07-01,230
|
||||
1952-08-01,242
|
||||
1952-09-01,209
|
||||
1952-10-01,191
|
||||
1952-11-01,172
|
||||
1952-12-01,194
|
||||
1953-01-01,196
|
||||
1953-02-01,196
|
||||
1953-03-01,236
|
||||
1953-04-01,235
|
||||
1953-05-01,229
|
||||
1953-06-01,243
|
||||
1953-07-01,264
|
||||
1953-08-01,272
|
||||
1953-09-01,237
|
||||
1953-10-01,211
|
||||
1953-11-01,180
|
||||
1953-12-01,201
|
||||
1954-01-01,204
|
||||
1954-02-01,188
|
||||
1954-03-01,235
|
||||
1954-04-01,227
|
||||
1954-05-01,234
|
||||
1954-06-01,264
|
||||
1954-07-01,302
|
||||
1954-08-01,293
|
||||
1954-09-01,259
|
||||
1954-10-01,229
|
||||
1954-11-01,203
|
||||
1954-12-01,229
|
||||
1955-01-01,242
|
||||
1955-02-01,233
|
||||
1955-03-01,267
|
||||
1955-04-01,269
|
||||
1955-05-01,270
|
||||
1955-06-01,315
|
||||
1955-07-01,364
|
||||
1955-08-01,347
|
||||
1955-09-01,312
|
||||
1955-10-01,274
|
||||
1955-11-01,237
|
||||
1955-12-01,278
|
||||
1956-01-01,284
|
||||
1956-02-01,277
|
||||
1956-03-01,317
|
||||
1956-04-01,313
|
||||
1956-05-01,318
|
||||
1956-06-01,374
|
||||
1956-07-01,413
|
||||
1956-08-01,405
|
||||
1956-09-01,355
|
||||
1956-10-01,306
|
||||
1956-11-01,271
|
||||
1956-12-01,306
|
||||
1957-01-01,315
|
||||
1957-02-01,301
|
||||
1957-03-01,356
|
||||
1957-04-01,348
|
||||
1957-05-01,355
|
||||
1957-06-01,422
|
||||
1957-07-01,465
|
||||
1957-08-01,467
|
||||
1957-09-01,404
|
||||
1957-10-01,347
|
||||
1957-11-01,305
|
||||
1957-12-01,336
|
||||
1958-01-01,340
|
||||
1958-02-01,318
|
||||
1958-03-01,362
|
||||
1958-04-01,348
|
||||
1958-05-01,363
|
||||
1958-06-01,435
|
||||
1958-07-01,491
|
||||
1958-08-01,505
|
||||
1958-09-01,404
|
||||
1958-10-01,359
|
||||
1958-11-01,310
|
||||
1958-12-01,337
|
||||
1959-01-01,360
|
||||
1959-02-01,342
|
||||
1959-03-01,406
|
||||
1959-04-01,396
|
||||
1959-05-01,420
|
||||
1959-06-01,472
|
||||
1959-07-01,548
|
||||
1959-08-01,559
|
||||
1959-09-01,463
|
||||
1959-10-01,407
|
||||
1959-11-01,362
|
||||
1959-12-01,405
|
||||
1960-01-01,417
|
||||
1960-02-01,391
|
||||
1960-03-01,419
|
||||
1960-04-01,461
|
||||
1960-05-01,472
|
||||
1960-06-01,535
|
||||
1960-07-01,622
|
||||
1960-08-01,606
|
||||
1960-09-01,508
|
||||
1960-10-01,461
|
||||
1960-11-01,390
|
||||
1960-12-01,432
|
||||
|
BIN
__pycache__/app.cpython-311.pyc
Executable file
BIN
__pycache__/app.cpython-311.pyc
Executable file
Binary file not shown.
202
app.py
Executable file
202
app.py
Executable file
@ -0,0 +1,202 @@
|
||||
from flask import Flask, request, render_template, session
from werkzeug.utils import secure_filename
from models.time_series import process_time_series
from models.plotting import create_comparison_plot
from utils.file_handling import allowed_file, read_file, save_processed_file
from utils.forecast_history import update_forecast_history, download_forecast_history
import os

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'Uploads'
app.config['ALLOWED_EXTENSIONS'] = {'csv', 'xls', 'xlsx'}
# Required for session management.  SECURITY: read the signing key from the
# environment in production; the hard-coded fallback is for local dev only.
app.secret_key = os.environ.get('SECRET_KEY', 'your-secret-key')

# Ensure upload folder exists
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)
@app.route('/')
def index():
    """Render the upload/landing page."""
    return render_template('index.html')
@app.route('/upload', methods=['POST'])
def upload_file():
    """Handle a dataset upload and run the requested analyses.

    Expects a multipart form with a 'file' field plus analysis options
    (decomposition / forecasting / ACF-PACF toggles, train/test split,
    horizon, model type).  Renders results.html on success, or index.html
    with an error message on any failure.
    """
    if 'file' not in request.files:
        return render_template('index.html', error='No file part')

    file = request.files['file']
    if file.filename == '':
        return render_template('index.html', error='No selected file')

    # BUG FIX: the original nested everything under `if file and
    # allowed_file(...)` with no else branch, so a disallowed extension fell
    # through and the view implicitly returned None (an HTTP 500).
    if not (file and allowed_file(file.filename)):
        return render_template('index.html', error='File type not allowed')

    filename = secure_filename(file.filename)
    filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
    file.save(filepath)
    session['filepath'] = filepath  # Store filepath in session
    session['forecast_history'] = []  # Initialize forecast history
    session['selected_indices'] = []  # Initialize selected indices

    # Get user selections
    do_decomposition = 'decomposition' in request.form
    do_forecasting = 'forecasting' in request.form
    do_acf_pacf = 'acf_pacf' in request.form
    train_percent = float(request.form.get('train_percent', 80)) / 100
    test_percent = float(request.form.get('test_percent', 20)) / 100
    forecast_periods = int(request.form.get('forecast_periods', 12))
    model_type = request.form.get('model_type', 'ARIMA')

    # Validate train/test percentages (tolerate small float precision errors)
    if abs(train_percent + test_percent - 1.0) > 0.01:
        return render_template('index.html', error='Train and test percentages must sum to 100%')

    session['do_decomposition'] = do_decomposition
    session['do_forecasting'] = do_forecasting
    session['do_acf_pacf'] = do_acf_pacf
    session['train_percent'] = train_percent
    session['test_percent'] = test_percent
    session['forecast_periods'] = forecast_periods
    session['model_type'] = model_type

    result = process_time_series(filepath, do_decomposition, do_forecasting, do_acf_pacf, train_percent,
                                 forecast_periods, model_type)

    if 'error' in result:
        return render_template('index.html', error=result['error'])

    # Record this run in the forecast history (helper deduplicates)
    if do_forecasting and result['metrics']:
        update_forecast_history(session, train_percent, test_percent, forecast_periods, model_type,
                                result['metrics'])

    return render_template('results.html',
                           do_decomposition=do_decomposition,
                           do_forecasting=do_forecasting,
                           do_acf_pacf=do_acf_pacf,
                           train_percent=train_percent * 100,
                           test_percent=test_percent * 100,
                           forecast_periods=forecast_periods,
                           forecast_history=session['forecast_history'],
                           selected_indices=session['selected_indices'],
                           **result)
@app.route('/reforecast', methods=['POST'])
def reforecast():
    """Re-run forecasting on the previously uploaded file with new parameters."""
    filepath = session.get('filepath')
    if not filepath or not os.path.exists(filepath):
        return render_template('index.html', error='Session expired or file not found. Please upload the file again.')

    # Parameters for this run come from the reforecast form.
    form = request.form
    train_percent = float(form.get('train_percent', 80)) / 100
    test_percent = float(form.get('test_percent', 20)) / 100
    forecast_periods = int(form.get('forecast_periods', 12))
    model_type = form.get('model_type', 'ARIMA')
    add_to_existing = 'add_to_existing' in form

    # Percentages must sum to 100% (tolerate float rounding noise).
    if abs(train_percent + test_percent - 1.0) > 0.01:
        return render_template('index.html', error='Train and test percentages must sum to 100%')

    # Reuse the analysis toggles from the original upload; forecasting is
    # implied because this endpoint exists solely to reforecast.
    do_decomposition = session.get('do_decomposition', False)
    do_acf_pacf = session.get('do_acf_pacf', False)
    do_forecasting = True

    result = process_time_series(filepath, do_decomposition, do_forecasting, do_acf_pacf, train_percent,
                                 forecast_periods, model_type)
    if 'error' in result:
        return render_template('index.html', error=result['error'])

    # Record this run in the history (helper skips duplicates).
    if do_forecasting and result['metrics']:
        update_forecast_history(session, train_percent, test_percent, forecast_periods, model_type, result['metrics'],
                                add_to_existing)

    # Remember the parameters used for this run.
    session['train_percent'] = train_percent
    session['test_percent'] = test_percent
    session['forecast_periods'] = forecast_periods
    session['model_type'] = model_type

    # With several forecasts selected, show the comparison overlay instead
    # of the single-run forecast plot.
    if len(session.get('selected_indices', [])) > 1:
        result['forecast_html'] = create_comparison_plot(filepath, session['forecast_history'],
                                                         session['selected_indices'])

    return render_template('results.html',
                           do_decomposition=do_decomposition,
                           do_forecasting=do_forecasting,
                           do_acf_pacf=do_acf_pacf,
                           train_percent=train_percent * 100,
                           test_percent=test_percent * 100,
                           forecast_periods=forecast_periods,
                           forecast_history=session['forecast_history'],
                           selected_indices=session['selected_indices'],
                           scroll_to_forecast=True,
                           **result)
@app.route('/compare_forecasts', methods=['POST'])
def compare_forecasts():
    """Overlay the user-selected forecast runs on a single comparison plot."""
    filepath = session.get('filepath')
    if not filepath or not os.path.exists(filepath):
        return render_template('index.html', error='Session expired or file not found. Please upload the file again.')

    # Indices of the history entries the user ticked for comparison.
    selected_indices = [int(idx) for idx in request.form.getlist('selected_forecasts')]
    if not selected_indices:
        return render_template('index.html', error='No forecasts selected for comparison')

    session['selected_indices'] = selected_indices
    session.modified = True

    # Pull the current analysis settings (same defaults as the upload route)
    # so the rest of the results page can be regenerated.
    do_decomposition = session.get('do_decomposition', False)
    do_forecasting = session.get('do_forecasting', True)
    do_acf_pacf = session.get('do_acf_pacf', False)
    train_percent = session.get('train_percent', 0.8)
    test_percent = session.get('test_percent', 0.2)
    forecast_periods = session.get('forecast_periods', 12)
    model_type = session.get('model_type', 'ARIMA')

    # Build the overlay plot first, then re-run the current forecast so the
    # other panels (decomposition, ACF/PACF, summary) stay populated.
    forecast_html = create_comparison_plot(filepath, session['forecast_history'], selected_indices)
    result = process_time_series(filepath, do_decomposition, do_forecasting, do_acf_pacf, train_percent,
                                 forecast_periods, model_type)
    if 'error' in result:
        return render_template('index.html', error=result['error'])

    result['forecast_html'] = forecast_html

    return render_template('results.html',
                           do_decomposition=do_decomposition,
                           do_forecasting=do_forecasting,
                           do_acf_pacf=do_acf_pacf,
                           train_percent=train_percent * 100,
                           test_percent=test_percent * 100,
                           forecast_periods=forecast_periods,
                           forecast_history=session['forecast_history'],
                           selected_indices=selected_indices,
                           scroll_to_forecast=True,
                           **result)
@app.route('/download_forecast_history')
def download_forecast_history():
    """Send the accumulated forecast history to the client as a download.

    BUG FIX: this view function shadows the helper of the same name imported
    at the top of the file from utils.forecast_history, so the original body
    `return download_forecast_history(session)` called *itself* and recursed
    until RecursionError.  Import the helper under an alias locally so the
    correct function is invoked.
    """
    from utils.forecast_history import download_forecast_history as export_history
    return export_history(session)
@app.route('/download/<filename>')
def download_file(filename):
    """Serve a file from the upload folder as an attachment.

    BUG FIX: the original called send_file, which is never imported in this
    module (NameError at request time).  send_from_directory is used instead;
    it additionally rejects path-traversal filenames (e.g. '../app.py'),
    which matters because *filename* comes straight from the URL.
    """
    from flask import send_from_directory
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename, as_attachment=True)
if __name__ == '__main__':
    # SECURITY: debug=True enables the Werkzeug interactive debugger, which
    # allows remote code execution, and must never be combined with
    # host='0.0.0.0' outside a trusted network.  Opt in explicitly with
    # FLASK_DEBUG=1 for local development.
    app.run(host='0.0.0.0', port=5000, debug=os.environ.get('FLASK_DEBUG') == '1')
1
models/__init__.py
Executable file
1
models/__init__.py
Executable file
@ -0,0 +1 @@
|
||||
# Empty __init__.py to make models a package
|
||||
BIN
models/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
models/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
models/__pycache__/__init__.cpython-312.pyc
Executable file
BIN
models/__pycache__/__init__.cpython-312.pyc
Executable file
Binary file not shown.
BIN
models/__pycache__/plotting.cpython-311.pyc
Executable file
BIN
models/__pycache__/plotting.cpython-311.pyc
Executable file
Binary file not shown.
BIN
models/__pycache__/plotting.cpython-312.pyc
Executable file
BIN
models/__pycache__/plotting.cpython-312.pyc
Executable file
Binary file not shown.
BIN
models/__pycache__/time_series.cpython-311.pyc
Executable file
BIN
models/__pycache__/time_series.cpython-311.pyc
Executable file
Binary file not shown.
BIN
models/__pycache__/time_series.cpython-312.pyc
Executable file
BIN
models/__pycache__/time_series.cpython-312.pyc
Executable file
Binary file not shown.
138
models/plotting.py
Executable file
138
models/plotting.py
Executable file
@ -0,0 +1,138 @@
|
||||
import pandas as pd
|
||||
import plotly.express as px
|
||||
import plotly.graph_objects as go
|
||||
import plotly.io as pio
|
||||
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
|
||||
import matplotlib
|
||||
|
||||
matplotlib.use('Agg')
|
||||
import matplotlib.pyplot as plt
|
||||
import io
|
||||
import base64
|
||||
from statsmodels.tsa.holtwinters import ExponentialSmoothing
|
||||
import pmdarima as pm
|
||||
from prophet import Prophet
|
||||
|
||||
|
||||
def create_acf_pacf_plots(data):
    """Render ACF/PACF plots for *data* and return them as embeddable HTML.

    The plots are drawn with matplotlib (statsmodels' plot_acf/plot_pacf),
    rasterised to PNG, and embedded as a background image of an otherwise
    empty Plotly figure so they fit the Plotly-based results page.

    Parameters
    ----------
    data : pandas.Series
        Time-series values to analyse.

    Returns
    -------
    str
        An HTML fragment (full_html=False) containing the figure.
    """
    # BUG FIX: plot_pacf requires nlags < len(data) // 2; the hard-coded
    # lags=40 raised ValueError for series shorter than ~82 points.  Clamp
    # the lag count to what the series can support.
    lags = min(40, max(1, len(data) // 2 - 1))

    fig, (ax_acf, ax_pacf) = plt.subplots(2, 1, figsize=(10, 8))

    plot_acf(data, ax=ax_acf, lags=lags)
    ax_acf.set_title('Autocorrelation Function')

    plot_pacf(data, ax=ax_pacf, lags=lags)
    ax_pacf.set_title('Partial Autocorrelation Function')

    # Rasterise the matplotlib figure and base64-encode it for embedding.
    buf = io.BytesIO()
    plt.savefig(buf, format='png')
    plt.close(fig)
    buf.seek(0)
    img_str = base64.b64encode(buf.getvalue()).decode('utf-8')

    # Wrap the PNG in a Plotly figure so the template can treat it like the
    # other interactive plots.
    fig_plotly = go.Figure()
    fig_plotly.add_layout_image(
        dict(
            source=f'data:image/png;base64,{img_str}',
            x=0,
            y=1,
            xref="paper",
            yref="paper",
            sizex=1,
            sizey=1,
            sizing="stretch",
            opacity=1,
            layer="below"
        )
    )
    fig_plotly.update_layout(
        height=600,
        showlegend=False,
        xaxis=dict(visible=False),
        yaxis=dict(visible=False)
    )
    return pio.to_html(fig_plotly, full_html=False)
def create_comparison_plot(filepath, forecast_history, selected_indices):
    """Plot the historical series plus the forecasts of several saved runs.

    Each entry of *forecast_history* records the parameters of a previous
    run (train/test split, horizon, model type); the selected entries are
    re-fitted here and overlaid on a single Plotly figure.

    Parameters
    ----------
    filepath : str
        CSV or Excel file; first column is the date, second the value.
    forecast_history : list[dict]
        Saved run parameters ('train_percent', 'test_percent',
        'forecast_periods', 'model_type').
    selected_indices : list[int]
        Indices into *forecast_history* to re-run and plot.

    Returns
    -------
    str
        An HTML fragment (full_html=False) containing the comparison plot.
    """
    # Load the series.
    if filepath.endswith('.csv'):
        df = pd.read_csv(filepath)
    else:
        df = pd.read_excel(filepath)

    date_col = df.columns[0]
    value_col = df.columns[1]
    df[date_col] = pd.to_datetime(df[date_col])
    df.set_index(date_col, inplace=True)

    fig = go.Figure()
    fig.add_trace(go.Scatter(x=df.index, y=df[value_col], name='Historical', line=dict(color='black')))

    # Plotly qualitative palette so each run gets a distinct line colour.
    colors = px.colors.qualitative.Plotly

    for idx, run_idx in enumerate(selected_indices):
        entry = forecast_history[run_idx]
        train_percent = entry['train_percent'] / 100
        forecast_periods = entry['forecast_periods']
        model_type = entry['model_type']

        # Split data according to that run's parameters.
        train_size = int(len(df) * train_percent)
        test_size = len(df) - train_size
        train_data = df[value_col].iloc[:train_size]
        test_data = df[value_col].iloc[train_size:] if test_size > 0 else pd.Series()
        forecast_dates = pd.date_range(start=df.index[-1], periods=forecast_periods + 1,
                                       freq=df.index.inferred_freq)[1:]

        forecast = None
        if model_type == 'ARIMA':
            # PERF FIX: auto_arima returns an already-fitted model; the
            # original then called model.fit(train_data), refitting the same
            # model for no change in output.
            model = pm.auto_arima(train_data,
                                  seasonal=True,
                                  m=12,
                                  start_p=0, start_q=0,
                                  max_p=3, max_q=3,
                                  start_P=0, start_Q=0,
                                  max_P=2, max_Q=2,
                                  d=1, D=1,
                                  trace=False,
                                  error_action='ignore',
                                  suppress_warnings=True,
                                  stepwise=True)
            forecast = model.predict(n_periods=forecast_periods)

        elif model_type == 'Exponential Smoothing':
            model = ExponentialSmoothing(train_data,
                                         trend='add',
                                         seasonal='add',
                                         seasonal_periods=12)
            model_fit = model.fit()
            forecast = model_fit.forecast(forecast_periods)

        elif model_type == 'Prophet':
            # Prophet expects columns named 'ds' (date) and 'y' (value).
            prophet_df = train_data.reset_index().rename(columns={date_col: 'ds', value_col: 'y'})
            model = Prophet(yearly_seasonality=True, weekly_seasonality=False, daily_seasonality=False)
            model.add_seasonality(name='monthly', period=30.5, fourier_order=5)
            model_fit = model.fit(prophet_df)
            future = model.make_future_dataframe(periods=forecast_periods, freq=df.index.inferred_freq)
            forecast_full = model_fit.predict(future)
            forecast = forecast_full['yhat'].iloc[-forecast_periods:].values

        # Draw the test segment only once to avoid duplicate traces.
        if test_size > 0 and idx == 0:
            fig.add_trace(go.Scatter(x=df.index[train_size:], y=test_data, name='Test Data', line=dict(color='green')))

        # Add this run's forecast with a descriptive legend label.
        label = f"Forecast Run {run_idx + 1}: {model_type}, {entry['train_percent']:.0f}/{entry['test_percent']:.0f}, {forecast_periods} periods"
        fig.add_trace(go.Scatter(x=forecast_dates, y=forecast, name=label,
                                 line=dict(dash='dash', color=colors[idx % len(colors)])))

    fig.update_layout(title='Forecast Comparison', height=400, showlegend=True)
    return pio.to_html(fig, full_html=False)
176
models/time_series.py
Executable file
176
models/time_series.py
Executable file
@ -0,0 +1,176 @@
|
||||
import pandas as pd
|
||||
from statsmodels.tsa.seasonal import seasonal_decompose
|
||||
from statsmodels.tsa.holtwinters import ExponentialSmoothing
|
||||
import pmdarima as pm
|
||||
from prophet import Prophet
|
||||
import plotly.express as px
|
||||
import plotly.graph_objects as go
|
||||
from plotly.subplots import make_subplots
|
||||
import plotly.io as pio
|
||||
import numpy as np
|
||||
from sklearn.metrics import mean_absolute_error, mean_squared_error
|
||||
from utils.file_handling import save_processed_file
|
||||
from .plotting import create_acf_pacf_plots
|
||||
|
||||
|
||||
def _score(actual, predicted):
    """Return MAE / MSE / RMSE for a test segment (shared by all models)."""
    mae = mean_absolute_error(actual, predicted)
    mse = mean_squared_error(actual, predicted)
    return {'MAE': mae, 'MSE': mse, 'RMSE': np.sqrt(mse)}


def process_time_series(filepath, do_decomposition, do_forecasting, do_acf_pacf, train_percent, forecast_periods,
                        model_type):
    """Run the requested analyses on a time-series file and return plots + stats.

    Parameters
    ----------
    filepath : str
        CSV or Excel file whose first column is the date and second the value.
    do_decomposition, do_forecasting, do_acf_pacf : bool
        Which analyses to perform.
    train_percent : float
        Fraction (0-1) of the series used for training.
    forecast_periods : int
        Number of future periods to forecast.
    model_type : str
        One of 'ARIMA', 'Exponential Smoothing', 'Prophet'.

    Returns
    -------
    dict
        Plot HTML fragments, summary statistics, metrics and metadata, or
        {'error': message} if anything fails.
    """
    try:
        # Read file (CSV by extension, anything else via pandas' Excel reader).
        if filepath.endswith('.csv'):
            df = pd.read_csv(filepath)
        else:
            df = pd.read_excel(filepath)

        # Assume first column is the date, second the observed value.
        date_col = df.columns[0]
        value_col = df.columns[1]
        df[date_col] = pd.to_datetime(df[date_col])
        df.set_index(date_col, inplace=True)

        # Outputs default to None so the returned dict has a uniform shape
        # regardless of which analyses actually ran.
        plot_html = None
        forecast_html = None
        acf_pacf_html = None
        summary = df[value_col].describe().to_dict()
        model_params = None
        train_size = None
        test_size = None
        metrics = None

        # Copy that accumulates derived columns and is saved to disk at the end.
        processed_df = df.copy()

        # --- Seasonal decomposition ---------------------------------------
        if do_decomposition:
            decomposition = seasonal_decompose(df[value_col], model='additive', period=12)
            fig = make_subplots(rows=4, cols=1,
                                subplot_titles=('Original Series', 'Trend', 'Seasonality', 'Residuals'))
            fig.add_trace(go.Scatter(x=df.index, y=df[value_col], name='Original'), row=1, col=1)
            fig.add_trace(go.Scatter(x=df.index, y=decomposition.trend, name='Trend'), row=2, col=1)
            fig.add_trace(go.Scatter(x=df.index, y=decomposition.seasonal, name='Seasonality'), row=3, col=1)
            fig.add_trace(go.Scatter(x=df.index, y=decomposition.resid, name='Residuals'), row=4, col=1)
            fig.update_layout(height=800, showlegend=True)
            plot_html = pio.to_html(fig, full_html=False)

            processed_df['Trend'] = decomposition.trend
            processed_df['Seasonality'] = decomposition.seasonal
            processed_df['Residuals'] = decomposition.resid

        # --- Forecasting ---------------------------------------------------
        if do_forecasting:
            # Chronological train/test split.
            train_size = int(len(df) * train_percent)
            test_size = len(df) - train_size
            train_data = df[value_col].iloc[:train_size]
            test_data = df[value_col].iloc[train_size:] if test_size > 0 else pd.Series()
            forecast_dates = pd.date_range(start=df.index[-1], periods=forecast_periods + 1,
                                           freq=df.index.inferred_freq)[1:]

            forecast = None
            if model_type == 'ARIMA':
                # PERF FIX: auto_arima both searches the (p,d,q)(P,D,Q) space
                # and fits the winning model; the original code then called
                # model.fit(train_data) again, refitting for no benefit.
                model = pm.auto_arima(train_data,
                                      seasonal=True,
                                      m=12,
                                      start_p=0, start_q=0,
                                      max_p=3, max_q=3,
                                      start_P=0, start_Q=0,
                                      max_P=2, max_Q=2,
                                      d=1, D=1,
                                      trace=False,
                                      error_action='ignore',
                                      suppress_warnings=True,
                                      stepwise=True)
                forecast = model.predict(n_periods=forecast_periods)
                model_params = f"{model.order}, Seasonal{model.seasonal_order}"

                # Score the model on the held-out segment if one exists.
                if test_size > 0:
                    test_predictions = model.predict(n_periods=test_size)
                    metrics = _score(test_data, test_predictions)

            elif model_type == 'Exponential Smoothing':
                model = ExponentialSmoothing(train_data,
                                             trend='add',
                                             seasonal='add',
                                             seasonal_periods=12)
                model_fit = model.fit()
                forecast = model_fit.forecast(forecast_periods)
                model_params = "Additive Trend, Additive Seasonal"

                if test_size > 0:
                    test_predictions = model_fit.forecast(test_size)
                    metrics = _score(test_data, test_predictions)

            elif model_type == 'Prophet':
                # Prophet expects columns named 'ds' (date) and 'y' (value).
                prophet_df = train_data.reset_index().rename(columns={date_col: 'ds', value_col: 'y'})
                model = Prophet(yearly_seasonality=True, weekly_seasonality=False, daily_seasonality=False)
                model.add_seasonality(name='monthly', period=30.5, fourier_order=5)
                model_fit = model.fit(prophet_df)
                future = model.make_future_dataframe(periods=forecast_periods, freq=df.index.inferred_freq)
                forecast_full = model_fit.predict(future)
                forecast = forecast_full['yhat'].iloc[-forecast_periods:].values
                model_params = "Prophet"

                if test_size > 0:
                    test_future = model.make_future_dataframe(periods=test_size, freq=df.index.inferred_freq)
                    test_predictions = model.predict(test_future)['yhat'].iloc[-test_size:].values
                    metrics = _score(test_data, test_predictions)

            # Forecast plot: history, test segment (if any), and forecast.
            forecast_fig = go.Figure()
            forecast_fig.add_trace(go.Scatter(x=df.index, y=df[value_col], name='Historical'))
            if test_size > 0:
                forecast_fig.add_trace(
                    go.Scatter(x=df.index[train_size:], y=test_data, name='Test Data', line=dict(color='green')))
            forecast_fig.add_trace(
                go.Scatter(x=forecast_dates, y=forecast, name=f'Forecast ({model_type})', line=dict(dash='dash')))
            forecast_fig.update_layout(title=f'Forecast ({model_type})', height=400)
            forecast_html = pio.to_html(forecast_fig, full_html=False)

        # --- ACF / PACF -----------------------------------------------------
        if do_acf_pacf:
            acf_pacf_html = create_acf_pacf_plots(df[value_col])

        # Persist the processed dataframe next to the original upload.
        filename = save_processed_file(processed_df, filepath)

        return {
            'plot_html': plot_html,
            'forecast_html': forecast_html,
            'acf_pacf_html': acf_pacf_html,
            'summary': summary,
            'filename': filename,
            'model_params': model_params,
            'train_size': train_size,
            'test_size': test_size,
            'metrics': metrics,
            'forecast_dates': forecast_dates.tolist() if do_forecasting else [],
            'forecast_values': forecast.tolist() if do_forecasting else [],
            'model_type': model_type
        }

    except Exception as e:
        # Deliberately broad: any failure is reported to the UI as an error
        # message instead of crashing the request.
        return {'error': str(e)}
41
requirements.txt
Executable file
41
requirements.txt
Executable file
@ -0,0 +1,41 @@
|
||||
blinker==1.9.0
|
||||
click==8.2.1
|
||||
cmdstanpy==1.2.5
|
||||
contourpy==1.3.3
|
||||
cycler==0.12.1
|
||||
Cython==3.0.11
|
||||
et_xmlfile==2.0.0
|
||||
Flask==3.1.1
|
||||
fonttools==4.59.0
|
||||
holidays==0.77
|
||||
importlib_resources==6.5.2
|
||||
itsdangerous==2.2.0
|
||||
Jinja2==3.1.6
|
||||
joblib==1.4.2
|
||||
kiwisolver==1.4.8
|
||||
MarkupSafe==3.0.2
|
||||
matplotlib==3.10.3
|
||||
narwhals==2.0.1
|
||||
numpy==1.26.4
|
||||
openpyxl==3.1.5
|
||||
packaging==24.1
|
||||
pandas==2.2.3
|
||||
patsy==1.0.1
|
||||
pillow==11.3.0
|
||||
plotly==6.2.0
|
||||
pmdarima==2.0.4
|
||||
prophet==1.1.7
|
||||
pyparsing==3.2.3
|
||||
python-dateutil==2.9.0.post0
|
||||
pytz==2025.2
|
||||
scikit-learn==1.5.2
|
||||
scipy==1.14.1
|
||||
setuptools==75.1.0
|
||||
six==1.17.0
|
||||
stanio==0.5.1
|
||||
statsmodels==0.14.3
|
||||
threadpoolctl==3.6.0
|
||||
tqdm==4.67.1
|
||||
tzdata==2025.2
|
||||
urllib3==2.2.3
|
||||
Werkzeug==3.1.3
|
||||
106
templates/index.html
Executable file
106
templates/index.html
Executable file
@ -0,0 +1,106 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Time Series Analysis</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
|
||||
<style>
|
||||
body {
|
||||
background-color: #f8f9fa;
|
||||
}
|
||||
.container {
|
||||
max-width: 800px;
|
||||
margin-top: 20px;
|
||||
}
|
||||
.form-check-label {
|
||||
margin-left: 10px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<!-- Warning Message -->
|
||||
<div class="alert alert-warning alert-dismissible fade show" role="alert">
|
||||
<strong>Warning:</strong> This app does not save files or outputs. All work will be lost when the page is closed.
|
||||
<button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
|
||||
</div>
|
||||
|
||||
<h1 class="text-center mb-4">Time Series Analysis</h1>
|
||||
|
||||
{% if error %}
|
||||
<div class="alert alert-danger">{{ error }}</div>
|
||||
{% endif %}
|
||||
|
||||
<form method="post" enctype="multipart/form-data" action="/upload">
|
||||
<div class="mb-3">
|
||||
<label for="file" class="form-label">Upload CSV or Excel File</label>
|
||||
<input type="file" class="form-control" id="file" name="file" accept=".csv,.xls,.xlsx" required>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<h5>Analysis Options</h5>
|
||||
<div class="form-check">
|
||||
<input type="checkbox" class="form-check-input" id="decomposition" name="decomposition">
|
||||
<label class="form-check-label" for="decomposition">Perform Decomposition</label>
|
||||
</div>
|
||||
<div class="form-check">
|
||||
<input type="checkbox" class="form-check-input" id="forecasting" name="forecasting" checked>
|
||||
<label class="form-check-label" for="forecasting">Perform Forecasting</label>
|
||||
</div>
|
||||
<div class="form-check">
|
||||
<input type="checkbox" class="form-check-input" id="acf_pacf" name="acf_pacf">
|
||||
<label class="form-check-label" for="acf_pacf">Generate ACF/PACF Plots</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<label for="train_percent" class="form-label">Train Percentage</label>
|
||||
<input type="number" class="form-control" id="train_percent" name="train_percent" value="80" min="1" max="99" required>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<label for="test_percent" class="form-label">Test Percentage</label>
|
||||
<input type="number" class="form-control" id="test_percent" name="test_percent" value="20" min="1" max="99" required>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<label for="forecast_periods" class="form-label">Forecast Periods</label>
|
||||
<input type="number" class="form-control" id="forecast_periods" name="forecast_periods" value="12" min="1" required>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<label for="model_type" class="form-label">Forecast Model</label>
|
||||
<select class="form-control" id="model_type" name="model_type">
|
||||
<option value="ARIMA">ARIMA</option>
|
||||
<option value="Exponential Smoothing">Exponential Smoothing</option>
|
||||
<option value="Prophet">Prophet</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<button type="submit" class="btn btn-primary">Analyze</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
|
||||
<script>
|
||||
// Sync Train and Test Percentage inputs
|
||||
const trainInput = document.getElementById('train_percent');
|
||||
const testInput = document.getElementById('test_percent');
|
||||
|
||||
function syncPercentages(source, target) {
|
||||
source.addEventListener('input', () => {
|
||||
const value = parseFloat(source.value);
|
||||
if (!isNaN(value) && value >= 1 && value <= 99) {
|
||||
target.value = (100 - value).toFixed(0);
|
||||
} else {
|
||||
target.value = '';
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
syncPercentages(trainInput, testInput);
|
||||
syncPercentages(testInput, trainInput);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
238
templates/results.html
Executable file
238
templates/results.html
Executable file
@ -0,0 +1,238 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Analysis Results</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
|
||||
    <script src="https://cdn.jsdelivr.net/npm/plotly.js@2.27.0/dist/plotly.min.js"></script>
|
||||
<style>
|
||||
body {
|
||||
background-color: #f8f9fa;
|
||||
}
|
||||
.container {
|
||||
max-width: 1200px;
|
||||
margin-top: 20px;
|
||||
}
|
||||
.collapsible-section {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
.collapsible-section .btn-toggle {
|
||||
background-color: #007bff;
|
||||
color: white;
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
padding: 10px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
.collapsible-section .btn-toggle:hover {
|
||||
background-color: #0056b3;
|
||||
}
|
||||
.collapsible-section .collapse {
|
||||
margin-top: 10px;
|
||||
}
|
||||
.plotly-container {
|
||||
width: 100%;
|
||||
max-width: none;
|
||||
margin: 0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<!-- Warning Message -->
|
||||
<div class="alert alert-warning alert-dismissible fade show" role="alert">
|
||||
<strong>Warning:</strong> This app does not save files or outputs. All work will be lost when the page is closed.
|
||||
<button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
|
||||
</div>
|
||||
|
||||
<h1 class="text-center mb-4">Analysis Results</h1>
|
||||
|
||||
<!-- Summary Statistics -->
|
||||
<div class="collapsible-section">
|
||||
<button class="btn btn-toggle" type="button" data-bs-toggle="collapse" data-bs-target="#collapse-summary" aria-expanded="true" aria-controls="collapse-summary">
|
||||
Summary Statistics
|
||||
</button>
|
||||
<div class="collapse show" id="collapse-summary">
|
||||
<div class="card card-body">
|
||||
<h5>Summary Statistics</h5>
|
||||
<table class="table">
|
||||
{% for key, value in summary.items() %}
|
||||
<tr>
|
||||
<td>{{ key }}</td>
|
||||
<td>{{ value | round(2) }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
<a href="{{ url_for('download_file', filename=filename) }}" class="btn btn-secondary">Download Processed Data</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Decomposition -->
|
||||
{% if do_decomposition and plot_html %}
|
||||
<div class="collapsible-section">
|
||||
<button class="btn btn-toggle" type="button" data-bs-toggle="collapse" data-bs-target="#collapse-decomposition" aria-expanded="false" aria-controls="collapse-decomposition">
|
||||
Decomposition
|
||||
</button>
|
||||
<div class="collapse" id="collapse-decomposition">
|
||||
<div class="card card-body">
|
||||
<h5>Decomposition</h5>
|
||||
<div class="plotly-container" style="width: 1200px; height: 800px;">{{ plot_html | safe }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- ACF/PACF Plots -->
|
||||
{% if do_acf_pacf and acf_pacf_html %}
|
||||
<div class="collapsible-section">
|
||||
<button class="btn btn-toggle" type="button" data-bs-toggle="collapse" data-bs-target="#collapse-acf-pacf" aria-expanded="false" aria-controls="collapse-acf-pacf">
|
||||
ACF/PACF Plots
|
||||
</button>
|
||||
<div class="collapse" id="collapse-acf-pacf">
|
||||
<div class="card card-body">
|
||||
<h5>ACF/PACF Plots</h5>
|
||||
<div class="plotly-container" style="width: 1200px; height: 800px;">{{ acf_pacf_html | safe }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Forecasting -->
|
||||
{% if do_forecasting and forecast_html %}
|
||||
<div class="collapsible-section">
|
||||
<button class="btn btn-toggle" type="button" data-bs-toggle="collapse" data-bs-target="#collapse-forecasting" aria-expanded="{{ 'true' if scroll_to_forecast else 'false' }}" aria-controls="collapse-forecasting">
|
||||
Forecasting
|
||||
</button>
|
||||
<div class="collapse {{ 'show' if scroll_to_forecast else '' }}" id="collapse-forecasting">
|
||||
<div class="card card-body">
|
||||
<h5>Forecast Plot</h5>
|
||||
<p><strong>Model:</strong> {{ model_type }}</p>
|
||||
{% if model_params %}
|
||||
<p><strong>Model Parameters:</strong> {{ model_params }}</p>
|
||||
{% endif %}
|
||||
{% if metrics %}
|
||||
<p><strong>Test Set Metrics:</strong></p>
|
||||
<ul>
|
||||
<li>MAE: {{ metrics.MAE | round(2) }}</li>
|
||||
<li>MSE: {{ metrics.MSE | round(2) }}</li>
|
||||
<li>RMSE: {{ metrics.RMSE | round(2) }}</li>
|
||||
</ul>
|
||||
{% endif %}
|
||||
<div class="plotly-container" style="width: 1200px; height: 400px;">{{ forecast_html | safe }}</div>
|
||||
|
||||
{% if forecast_history %}
|
||||
<h5 class="mt-4">Forecast History</h5>
|
||||
<form method="post" action="{{ url_for('compare_forecasts') }}">
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Select</th>
|
||||
<th>Run</th>
|
||||
<th>Train Percent (%)</th>
|
||||
<th>Test Percent (%)</th>
|
||||
<th>Forecast Periods</th>
|
||||
<th>MAE</th>
|
||||
<th>MSE</th>
|
||||
<th>RMSE</th>
|
||||
<th>Model</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for entry in forecast_history %}
|
||||
<tr>
|
||||
<td><input type="checkbox" name="selected_forecasts" value="{{ loop.index0 }}" {{ 'checked' if loop.index0 in selected_indices else '' }}></td>
|
||||
<td>{{ loop.index }}</td>
|
||||
<td>{{ entry.train_percent | round(2) }}</td>
|
||||
<td>{{ entry.test_percent | round(2) }}</td>
|
||||
<td>{{ entry.forecast_periods }}</td>
|
||||
<td>{{ entry.mae | round(2) if entry.mae else '-' }}</td>
|
||||
<td>{{ entry.mse | round(2) if entry.mse else '-' }}</td>
|
||||
<td>{{ entry.rmse | round(2) if entry.rmse else '-' }}</td>
|
||||
<td>{{ entry.model_type }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<button type="submit" class="btn btn-primary">Compare Selected Forecasts</button>
|
||||
</form>
|
||||
<a href="{{ url_for('download_forecast_history') }}" class="btn btn-secondary mt-2">Download Forecast History</a>
|
||||
{% endif %}
|
||||
|
||||
<h5 class="mt-4">Re-forecast</h5>
|
||||
<form method="post" action="{{ url_for('reforecast') }}">
|
||||
<div class="mb-3">
|
||||
<label for="train_percent" class="form-label">Train Percentage</label>
|
||||
<input type="number" class="form-control" id="train_percent" name="train_percent" value="{{ train_percent }}" min="1" max="99" required>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="test_percent" class="form-label">Test Percentage</label>
|
||||
<input type="number" class="form-control" id="test_percent" name="test_percent" value="{{ test_percent }}" min="1" max="99" required>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="forecast_periods" class="form-label">Forecast Periods</label>
|
||||
<input type="number" class="form-control" id="forecast_periods" name="forecast_periods" value="{{ forecast_periods }}" min="1" required>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="model_type" class="form-label">Forecast Model</label>
|
||||
<select class="form-control" id="model_type" name="model_type">
|
||||
<option value="ARIMA" {{ 'selected' if model_type == 'ARIMA' else '' }}>ARIMA</option>
|
||||
<option value="Exponential Smoothing" {{ 'selected' if model_type == 'Exponential Smoothing' else '' }}>Exponential Smoothing</option>
|
||||
<option value="Prophet" {{ 'selected' if model_type == 'Prophet' else '' }}>Prophet</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input type="checkbox" class="form-check-input" id="add_to_existing" name="add_to_existing">
|
||||
<label class="form-check-label" for="add_to_existing">Add to Existing Plots</label>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary">Run Re-forecast</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
|
||||
<script>
|
||||
// Sync Train and Test Percentage inputs
|
||||
const trainInput = document.getElementById('train_percent');
|
||||
const testInput = document.getElementById('test_percent');
|
||||
|
||||
function syncPercentages(source, target) {
|
||||
source.addEventListener('input', () => {
|
||||
const value = parseFloat(source.value);
|
||||
if (!isNaN(value) && value >= 1 && value <= 99) {
|
||||
target.value = (100 - value).toFixed(0);
|
||||
} else {
|
||||
target.value = '';
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
syncPercentages(trainInput, testInput);
|
||||
syncPercentages(testInput, trainInput);
|
||||
|
||||
// Scroll to and expand forecasting section if scroll_to_forecast is true
|
||||
{% if scroll_to_forecast %}
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
const forecastingSection = document.querySelector('#collapse-forecasting');
|
||||
if (forecastingSection) {
|
||||
forecastingSection.classList.add('show');
|
||||
forecastingSection.scrollIntoView({ behavior: 'smooth' });
|
||||
}
|
||||
});
|
||||
{% endif %}
|
||||
|
||||
// Resize Plotly plots on section expand
|
||||
document.querySelectorAll('.collapse').forEach(collapse => {
|
||||
collapse.addEventListener('shown.bs.collapse', () => {
|
||||
if (typeof Plotly !== 'undefined') {
|
||||
Plotly.Plots.resize(collapse.querySelector('.plotly-container'));
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
145
uploads/airline_passengers.csv
Executable file
145
uploads/airline_passengers.csv
Executable file
@ -0,0 +1,145 @@
|
||||
"Month","Thousands of Passengers"
|
||||
"1949-01",112
|
||||
"1949-02",118
|
||||
"1949-03",132
|
||||
"1949-04",129
|
||||
"1949-05",121
|
||||
"1949-06",135
|
||||
"1949-07",148
|
||||
"1949-08",148
|
||||
"1949-09",136
|
||||
"1949-10",119
|
||||
"1949-11",104
|
||||
"1949-12",118
|
||||
"1950-01",115
|
||||
"1950-02",126
|
||||
"1950-03",141
|
||||
"1950-04",135
|
||||
"1950-05",125
|
||||
"1950-06",149
|
||||
"1950-07",170
|
||||
"1950-08",170
|
||||
"1950-09",158
|
||||
"1950-10",133
|
||||
"1950-11",114
|
||||
"1950-12",140
|
||||
"1951-01",145
|
||||
"1951-02",150
|
||||
"1951-03",178
|
||||
"1951-04",163
|
||||
"1951-05",172
|
||||
"1951-06",178
|
||||
"1951-07",199
|
||||
"1951-08",199
|
||||
"1951-09",184
|
||||
"1951-10",162
|
||||
"1951-11",146
|
||||
"1951-12",166
|
||||
"1952-01",171
|
||||
"1952-02",180
|
||||
"1952-03",193
|
||||
"1952-04",181
|
||||
"1952-05",183
|
||||
"1952-06",218
|
||||
"1952-07",230
|
||||
"1952-08",242
|
||||
"1952-09",209
|
||||
"1952-10",191
|
||||
"1952-11",172
|
||||
"1952-12",194
|
||||
"1953-01",196
|
||||
"1953-02",196
|
||||
"1953-03",236
|
||||
"1953-04",235
|
||||
"1953-05",229
|
||||
"1953-06",243
|
||||
"1953-07",264
|
||||
"1953-08",272
|
||||
"1953-09",237
|
||||
"1953-10",211
|
||||
"1953-11",180
|
||||
"1953-12",201
|
||||
"1954-01",204
|
||||
"1954-02",188
|
||||
"1954-03",235
|
||||
"1954-04",227
|
||||
"1954-05",234
|
||||
"1954-06",264
|
||||
"1954-07",302
|
||||
"1954-08",293
|
||||
"1954-09",259
|
||||
"1954-10",229
|
||||
"1954-11",203
|
||||
"1954-12",229
|
||||
"1955-01",242
|
||||
"1955-02",233
|
||||
"1955-03",267
|
||||
"1955-04",269
|
||||
"1955-05",270
|
||||
"1955-06",315
|
||||
"1955-07",364
|
||||
"1955-08",347
|
||||
"1955-09",312
|
||||
"1955-10",274
|
||||
"1955-11",237
|
||||
"1955-12",278
|
||||
"1956-01",284
|
||||
"1956-02",277
|
||||
"1956-03",317
|
||||
"1956-04",313
|
||||
"1956-05",318
|
||||
"1956-06",374
|
||||
"1956-07",413
|
||||
"1956-08",405
|
||||
"1956-09",355
|
||||
"1956-10",306
|
||||
"1956-11",271
|
||||
"1956-12",306
|
||||
"1957-01",315
|
||||
"1957-02",301
|
||||
"1957-03",356
|
||||
"1957-04",348
|
||||
"1957-05",355
|
||||
"1957-06",422
|
||||
"1957-07",465
|
||||
"1957-08",467
|
||||
"1957-09",404
|
||||
"1957-10",347
|
||||
"1957-11",305
|
||||
"1957-12",336
|
||||
"1958-01",340
|
||||
"1958-02",318
|
||||
"1958-03",362
|
||||
"1958-04",348
|
||||
"1958-05",363
|
||||
"1958-06",435
|
||||
"1958-07",491
|
||||
"1958-08",505
|
||||
"1958-09",404
|
||||
"1958-10",359
|
||||
"1958-11",310
|
||||
"1958-12",337
|
||||
"1959-01",360
|
||||
"1959-02",342
|
||||
"1959-03",406
|
||||
"1959-04",396
|
||||
"1959-05",420
|
||||
"1959-06",472
|
||||
"1959-07",548
|
||||
"1959-08",559
|
||||
"1959-09",463
|
||||
"1959-10",407
|
||||
"1959-11",362
|
||||
"1959-12",405
|
||||
"1960-01",417
|
||||
"1960-02",391
|
||||
"1960-03",419
|
||||
"1960-04",461
|
||||
"1960-05",472
|
||||
"1960-06",535
|
||||
"1960-07",622
|
||||
"1960-08",606
|
||||
"1960-09",508
|
||||
"1960-10",461
|
||||
"1960-11",390
|
||||
"1960-12",432
|
||||
|
145
uploads/processed_airline_passengers.csv
Executable file
145
uploads/processed_airline_passengers.csv
Executable file
@ -0,0 +1,145 @@
|
||||
Month,Thousands of Passengers
|
||||
1949-01-01,112
|
||||
1949-02-01,118
|
||||
1949-03-01,132
|
||||
1949-04-01,129
|
||||
1949-05-01,121
|
||||
1949-06-01,135
|
||||
1949-07-01,148
|
||||
1949-08-01,148
|
||||
1949-09-01,136
|
||||
1949-10-01,119
|
||||
1949-11-01,104
|
||||
1949-12-01,118
|
||||
1950-01-01,115
|
||||
1950-02-01,126
|
||||
1950-03-01,141
|
||||
1950-04-01,135
|
||||
1950-05-01,125
|
||||
1950-06-01,149
|
||||
1950-07-01,170
|
||||
1950-08-01,170
|
||||
1950-09-01,158
|
||||
1950-10-01,133
|
||||
1950-11-01,114
|
||||
1950-12-01,140
|
||||
1951-01-01,145
|
||||
1951-02-01,150
|
||||
1951-03-01,178
|
||||
1951-04-01,163
|
||||
1951-05-01,172
|
||||
1951-06-01,178
|
||||
1951-07-01,199
|
||||
1951-08-01,199
|
||||
1951-09-01,184
|
||||
1951-10-01,162
|
||||
1951-11-01,146
|
||||
1951-12-01,166
|
||||
1952-01-01,171
|
||||
1952-02-01,180
|
||||
1952-03-01,193
|
||||
1952-04-01,181
|
||||
1952-05-01,183
|
||||
1952-06-01,218
|
||||
1952-07-01,230
|
||||
1952-08-01,242
|
||||
1952-09-01,209
|
||||
1952-10-01,191
|
||||
1952-11-01,172
|
||||
1952-12-01,194
|
||||
1953-01-01,196
|
||||
1953-02-01,196
|
||||
1953-03-01,236
|
||||
1953-04-01,235
|
||||
1953-05-01,229
|
||||
1953-06-01,243
|
||||
1953-07-01,264
|
||||
1953-08-01,272
|
||||
1953-09-01,237
|
||||
1953-10-01,211
|
||||
1953-11-01,180
|
||||
1953-12-01,201
|
||||
1954-01-01,204
|
||||
1954-02-01,188
|
||||
1954-03-01,235
|
||||
1954-04-01,227
|
||||
1954-05-01,234
|
||||
1954-06-01,264
|
||||
1954-07-01,302
|
||||
1954-08-01,293
|
||||
1954-09-01,259
|
||||
1954-10-01,229
|
||||
1954-11-01,203
|
||||
1954-12-01,229
|
||||
1955-01-01,242
|
||||
1955-02-01,233
|
||||
1955-03-01,267
|
||||
1955-04-01,269
|
||||
1955-05-01,270
|
||||
1955-06-01,315
|
||||
1955-07-01,364
|
||||
1955-08-01,347
|
||||
1955-09-01,312
|
||||
1955-10-01,274
|
||||
1955-11-01,237
|
||||
1955-12-01,278
|
||||
1956-01-01,284
|
||||
1956-02-01,277
|
||||
1956-03-01,317
|
||||
1956-04-01,313
|
||||
1956-05-01,318
|
||||
1956-06-01,374
|
||||
1956-07-01,413
|
||||
1956-08-01,405
|
||||
1956-09-01,355
|
||||
1956-10-01,306
|
||||
1956-11-01,271
|
||||
1956-12-01,306
|
||||
1957-01-01,315
|
||||
1957-02-01,301
|
||||
1957-03-01,356
|
||||
1957-04-01,348
|
||||
1957-05-01,355
|
||||
1957-06-01,422
|
||||
1957-07-01,465
|
||||
1957-08-01,467
|
||||
1957-09-01,404
|
||||
1957-10-01,347
|
||||
1957-11-01,305
|
||||
1957-12-01,336
|
||||
1958-01-01,340
|
||||
1958-02-01,318
|
||||
1958-03-01,362
|
||||
1958-04-01,348
|
||||
1958-05-01,363
|
||||
1958-06-01,435
|
||||
1958-07-01,491
|
||||
1958-08-01,505
|
||||
1958-09-01,404
|
||||
1958-10-01,359
|
||||
1958-11-01,310
|
||||
1958-12-01,337
|
||||
1959-01-01,360
|
||||
1959-02-01,342
|
||||
1959-03-01,406
|
||||
1959-04-01,396
|
||||
1959-05-01,420
|
||||
1959-06-01,472
|
||||
1959-07-01,548
|
||||
1959-08-01,559
|
||||
1959-09-01,463
|
||||
1959-10-01,407
|
||||
1959-11-01,362
|
||||
1959-12-01,405
|
||||
1960-01-01,417
|
||||
1960-02-01,391
|
||||
1960-03-01,419
|
||||
1960-04-01,461
|
||||
1960-05-01,472
|
||||
1960-06-01,535
|
||||
1960-07-01,622
|
||||
1960-08-01,606
|
||||
1960-09-01,508
|
||||
1960-10-01,461
|
||||
1960-11-01,390
|
||||
1960-12-01,432
|
||||
|
1
utils/__init__.py
Executable file
1
utils/__init__.py
Executable file
@ -0,0 +1 @@
|
||||
# Empty __init__.py to make utils a package
|
||||
BIN
utils/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
utils/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
utils/__pycache__/__init__.cpython-312.pyc
Executable file
BIN
utils/__pycache__/__init__.cpython-312.pyc
Executable file
Binary file not shown.
BIN
utils/__pycache__/file_handling.cpython-311.pyc
Executable file
BIN
utils/__pycache__/file_handling.cpython-311.pyc
Executable file
Binary file not shown.
BIN
utils/__pycache__/file_handling.cpython-312.pyc
Executable file
BIN
utils/__pycache__/file_handling.cpython-312.pyc
Executable file
Binary file not shown.
BIN
utils/__pycache__/forecast_history.cpython-311.pyc
Executable file
BIN
utils/__pycache__/forecast_history.cpython-311.pyc
Executable file
Binary file not shown.
BIN
utils/__pycache__/forecast_history.cpython-312.pyc
Executable file
BIN
utils/__pycache__/forecast_history.cpython-312.pyc
Executable file
Binary file not shown.
15
utils/file_handling.py
Executable file
15
utils/file_handling.py
Executable file
@ -0,0 +1,15 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
|
||||
def allowed_file(filename):
    """Return True if *filename* carries an accepted upload extension (csv/xls/xlsx)."""
    _, dot, ext = filename.rpartition('.')
    # No dot at all means no extension; comparison is case-insensitive.
    return bool(dot) and ext.lower() in ('csv', 'xls', 'xlsx')
|
||||
|
||||
def read_file(filepath):
    """Load an uploaded data file into a pandas DataFrame.

    CSV files are read with ``pandas.read_csv``; anything else (xls/xlsx,
    as enforced upstream by ``allowed_file``) is handed to
    ``pandas.read_excel``.

    Args:
        filepath: Path of the uploaded file on disk.

    Returns:
        pandas.DataFrame with the file contents.
    """
    # Match the extension case-insensitively: allowed_file accepts
    # 'DATA.CSV', and the original case-sensitive endswith('.csv') check
    # would route such a file to the Excel reader and fail.
    if filepath.lower().endswith('.csv'):
        return pd.read_csv(filepath)
    return pd.read_excel(filepath)
|
||||
|
||||
def save_processed_file(processed_df, filepath):
    """Write *processed_df* as CSV next to the original upload.

    The output file is named ``processed_<original basename>`` and is
    placed in the same directory as *filepath*.

    Returns:
        The basename of the newly written file.
    """
    directory = os.path.dirname(filepath)
    out_name = 'processed_' + os.path.basename(filepath)
    processed_df.to_csv(os.path.join(directory, out_name))
    return out_name
|
||||
58
utils/forecast_history.py
Executable file
58
utils/forecast_history.py
Executable file
@ -0,0 +1,58 @@
|
||||
import pandas as pd
|
||||
import io
|
||||
from flask import send_file
|
||||
|
||||
|
||||
def update_forecast_history(session, train_percent, test_percent, forecast_periods, model_type, metrics,
                            add_to_existing=False):
    """Record one forecast run in the session-backed history.

    A new entry (split percentages stored on a 0-100 scale, metrics
    flattened to mae/mse/rmse, or None when no metrics were computed) is
    appended unless an entry with the same train/test split, horizon and
    model already exists.  The selection used by the comparison view is
    then updated: extended when *add_to_existing* is true, otherwise
    reset to just the latest entry.
    """
    new_entry = {
        'train_percent': train_percent * 100,
        'test_percent': test_percent * 100,
        'forecast_periods': forecast_periods,
        'mae': metrics['MAE'] if metrics else None,
        'mse': metrics['MSE'] if metrics else None,
        'rmse': metrics['RMSE'] if metrics else None,
        'model_type': model_type
    }
    history = session.get('forecast_history', [])

    def _same_run(entry):
        # Two runs are duplicates when every configuration field matches
        # (metric values are deliberately ignored).
        return (entry['train_percent'] == new_entry['train_percent']
                and entry['test_percent'] == new_entry['test_percent']
                and entry['forecast_periods'] == new_entry['forecast_periods']
                and entry['model_type'] == new_entry['model_type'])

    if not any(_same_run(entry) for entry in history):
        history.append(new_entry)
        session['forecast_history'] = history

    last_index = len(history) - 1
    if add_to_existing:
        session['selected_indices'] = session.get('selected_indices', []) + [last_index]
    else:
        session['selected_indices'] = [last_index]
    # Flask only persists mutations to mutable session values when told.
    session.modified = True
|
||||
|
||||
|
||||
def download_forecast_history(session):
    """Build an Excel download of the session's forecast history.

    Returns a ``(response, error)`` pair: ``(send_file response, None)``
    on success, or ``(None, message)`` when there is no history to
    export.
    """
    history = session.get('forecast_history', [])
    if not history:
        return None, 'No forecast history available'

    # Tabulate the history under user-facing column names, with a 1-based
    # 'Run' counter in the first column.
    frame = pd.DataFrame(history)
    frame = frame.rename(columns={
        'train_percent': 'Train Percent (%)',
        'test_percent': 'Test Percent (%)',
        'forecast_periods': 'Forecast Periods',
        'mae': 'MAE',
        'mse': 'MSE',
        'rmse': 'RMSE',
        'model_type': 'Model'
    })
    frame.insert(0, 'Run', range(1, len(frame) + 1))

    # Serialize to an in-memory Excel workbook and hand it to Flask as an
    # attachment download.
    buffer = io.BytesIO()
    frame.to_excel(buffer, index=False)
    buffer.seek(0)

    response = send_file(
        buffer,
        mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        as_attachment=True,
        download_name='forecast_history.xlsx')
    return response, None
|
||||
247
venv/bin/Activate.ps1
Normal file
247
venv/bin/Activate.ps1
Normal file
@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
69
venv/bin/activate
Normal file
69
venv/bin/activate
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV=/home/ilgaz/Time-Series-Analysis/venv
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(venv) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(venv) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
26
venv/bin/activate.csh
Normal file
26
venv/bin/activate.csh
Normal file
@ -0,0 +1,26 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV /home/ilgaz/Time-Series-Analysis/venv
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = '(venv) '"$prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT '(venv) '
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
||||
69
venv/bin/activate.fish
Normal file
69
venv/bin/activate.fish
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /home/ilgaz/Time-Series-Analysis/venv
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(venv) '
|
||||
end
|
||||
8
venv/bin/cygdb
Executable file
8
venv/bin/cygdb
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from Cython.Debugger.Cygdb import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/cython
Executable file
8
venv/bin/cython
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from Cython.Compiler.Main import setuptools_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(setuptools_main())
|
||||
8
venv/bin/cythonize
Executable file
8
venv/bin/cythonize
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from Cython.Build.Cythonize import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/f2py
Executable file
8
venv/bin/f2py
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from numpy.f2py.f2py2e import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/flask
Executable file
8
venv/bin/flask
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from flask.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/fonttools
Executable file
8
venv/bin/fonttools
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from fontTools.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/gunicorn
Executable file
8
venv/bin/gunicorn
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from gunicorn.app.wsgiapp import run
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(run())
|
||||
8
venv/bin/install_cmdstan
Executable file
8
venv/bin/install_cmdstan
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from cmdstanpy.install_cmdstan import __main__
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(__main__())
|
||||
8
venv/bin/install_cxx_toolchain
Executable file
8
venv/bin/install_cxx_toolchain
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from cmdstanpy.install_cxx_toolchain import __main__
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(__main__())
|
||||
8
venv/bin/pip
Executable file
8
venv/bin/pip
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3
Executable file
8
venv/bin/pip3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3.11
Executable file
8
venv/bin/pip3.11
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/plotly_get_chrome
Executable file
8
venv/bin/plotly_get_chrome
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from plotly.io._kaleido import get_chrome
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(get_chrome())
|
||||
8
venv/bin/pyftmerge
Executable file
8
venv/bin/pyftmerge
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from fontTools.merge import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pyftsubset
Executable file
8
venv/bin/pyftsubset
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from fontTools.subset import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
1
venv/bin/python
Symbolic link
1
venv/bin/python
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
||||
1
venv/bin/python3
Symbolic link
1
venv/bin/python3
Symbolic link
@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
||||
1
venv/bin/python3.11
Symbolic link
1
venv/bin/python3.11
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
||||
8
venv/bin/tqdm
Executable file
8
venv/bin/tqdm
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from tqdm.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/ttx
Executable file
8
venv/bin/ttx
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ilgaz/Time-Series-Analysis/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from fontTools.ttx import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@ -0,0 +1,19 @@
|
||||
The original Pyrex code as of 2006-04 is licensed under the following
|
||||
license: "Copyright stuff: Pyrex is free of restrictions. You may use,
|
||||
redistribute, modify and distribute modified versions."
|
||||
|
||||
------------------
|
||||
|
||||
Cython, which derives from Pyrex, is licensed under the Apache 2.0
|
||||
Software License. More precisely, all modifications and new code
|
||||
made to go from Pyrex to Cython are so licensed.
|
||||
|
||||
See LICENSE.txt for more details.
|
||||
|
||||
------------------
|
||||
|
||||
The output of a Cython compilation is NOT considered a derivative
|
||||
work of Cython. Specifically, though the compilation process may
|
||||
embed snippets of varying lengths into the final output, these
|
||||
snippets, as embedded in the output, do not encumber the resulting
|
||||
output with any license restrictions.
|
||||
@ -0,0 +1 @@
|
||||
pip
|
||||
@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
https://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
@ -0,0 +1,63 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Cython
|
||||
Version: 3.0.11
|
||||
Summary: The Cython compiler for writing C extensions in the Python language.
|
||||
Home-page: https://cython.org/
|
||||
Author: Robert Bradshaw, Stefan Behnel, Dag Seljebotn, Greg Ewing, et al.
|
||||
Author-email: cython-devel@python.org
|
||||
License: Apache-2.0
|
||||
Project-URL: Documentation, https://cython.readthedocs.io/
|
||||
Project-URL: Donate, https://cython.readthedocs.io/en/latest/src/donating.html
|
||||
Project-URL: Source Code, https://github.com/cython/cython
|
||||
Project-URL: Bug Tracker, https://github.com/cython/cython/issues
|
||||
Project-URL: User Group, https://groups.google.com/g/cython-users
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Programming Language :: C
|
||||
Classifier: Programming Language :: Cython
|
||||
Classifier: Topic :: Software Development :: Code Generators
|
||||
Classifier: Topic :: Software Development :: Compilers
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
|
||||
License-File: LICENSE.txt
|
||||
License-File: COPYING.txt
|
||||
|
||||
The Cython language makes writing C extensions for the Python language as
|
||||
easy as Python itself. Cython is a source code translator based on Pyrex_,
|
||||
but supports more cutting edge functionality and optimizations.
|
||||
|
||||
The Cython language is a superset of the Python language (almost all Python
|
||||
code is also valid Cython code), but Cython additionally supports optional
|
||||
static typing to natively call C functions, operate with C++ classes and
|
||||
declare fast C types on variables and class attributes. This allows the
|
||||
compiler to generate very efficient C code from Cython code.
|
||||
|
||||
This makes Cython the ideal language for writing glue code for external
|
||||
C/C++ libraries, and for fast C modules that speed up the execution of
|
||||
Python code.
|
||||
|
||||
Note that for one-time builds, e.g. for CI/testing, on platforms that are not
|
||||
covered by one of the wheel packages provided on PyPI *and* the pure Python wheel
|
||||
that we provide is not used, it is substantially faster than a full source build
|
||||
to install an uncompiled (slower) version of Cython with::
|
||||
|
||||
pip install Cython --install-option="--no-cython-compile"
|
||||
|
||||
.. _Pyrex: https://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
|
||||
448
venv/lib/python3.11/site-packages/Cython-3.0.11.dist-info/RECORD
Normal file
448
venv/lib/python3.11/site-packages/Cython-3.0.11.dist-info/RECORD
Normal file
@ -0,0 +1,448 @@
|
||||
../../../bin/cygdb,sha256=rW0ifb1l1CbLA7TELvRDZICAn5sH749jfkG0LY_bIfA,253
|
||||
../../../bin/cython,sha256=8x7sdNEE6gMvjG6W6_d65RLPWm4fNF-8pR85vRwL_K0,274
|
||||
../../../bin/cythonize,sha256=UL0GsENaFwL9EVukgpqbcoX8Bp8LrsBY0t0gReXf3Vs,254
|
||||
Cython-3.0.11.dist-info/COPYING.txt,sha256=4escSahQjoFz2sMBV-SmQ5pErYhGGUdGxCT7w_wrldc,756
|
||||
Cython-3.0.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Cython-3.0.11.dist-info/LICENSE.txt,sha256=lWiisVXmasPguh_YC1K4J7lGDmz28jMSXny8qOIG3cM,10174
|
||||
Cython-3.0.11.dist-info/METADATA,sha256=Olx-spBXM1wCz95fdBSflhN68RCfBJpc64eq2YTsU4M,3161
|
||||
Cython-3.0.11.dist-info/RECORD,,
|
||||
Cython-3.0.11.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
Cython-3.0.11.dist-info/WHEEL,sha256=5Y454O0emXuKMOybDQevJmMdyxBqrxVayhyCIJ_fxN0,151
|
||||
Cython-3.0.11.dist-info/entry_points.txt,sha256=VU8NX8gnQyFbyqiWMzfh9BHvYMuoQRS3Nbm3kKcKQeY,139
|
||||
Cython-3.0.11.dist-info/top_level.txt,sha256=jLV8tZV98iCbIfiJR4DVzTX5Ru1Y_pYMZ59wkMCe6SY,24
|
||||
Cython/Build/BuildExecutable.py,sha256=jl36W_HYIHVuVUtN6iBnDBKA4oZsT1Z3bYx1AR9G8Ys,4789
|
||||
Cython/Build/Cythonize.py,sha256=HYib-gx-ARKQc5p-GNOwVzYF0TzUEbzvl0sS-nOGyoI,9830
|
||||
Cython/Build/Dependencies.py,sha256=B4aRLzFxuc7kUtRUcmAxVQh4s9LRGZpVUyOGEYdnPq4,52930
|
||||
Cython/Build/Distutils.py,sha256=iO5tPX84Kc-ZWMocfuQbl_PqyC9HGGIRS-NiKI60-ZE,49
|
||||
Cython/Build/Inline.py,sha256=E4IRWhPzRuoaYF0hbAxBLLP5j2aUXWhHZo5JL8uKC2k,13387
|
||||
Cython/Build/IpythonMagic.py,sha256=D8BRzCzPt5Dji7PqTdZEU9NVxmAo65COaCLE5wLwn8c,21966
|
||||
Cython/Build/Tests/TestCyCache.py,sha256=4p0k5OfCdWCUVzp_-iVyYTYB9ey6mlKF92WxEehj0ZM,4467
|
||||
Cython/Build/Tests/TestCythonizeArgsParser.py,sha256=_ijPP5tDvaeiUtuxUawlOZ8P4dcnkOOUJoze2OAbW4A,20346
|
||||
Cython/Build/Tests/TestDependencies.py,sha256=Bt7ERe6WQoZaVjYDZGPuNzYZyv5dEIbtj59-9AK_AlY,5835
|
||||
Cython/Build/Tests/TestInline.py,sha256=Ct_KmRN-hhdrx91xB_RILcjaPA4_bxZiCD-8F8NXiUo,3487
|
||||
Cython/Build/Tests/TestIpythonMagic.py,sha256=uQECO6yjMmcqLbx5fQ17TP0QJdOvc2hgAuLqFWWeKTw,9411
|
||||
Cython/Build/Tests/TestRecythonize.py,sha256=6un9tt8-I1YiFJo9xXRWqe0aUndMfHwrwA0h81Emhwc,6276
|
||||
Cython/Build/Tests/TestStripLiterals.py,sha256=D6F9NRbQXO7bHdqugpUnJF8iSjqs8xW_99ViP9ouhzc,1549
|
||||
Cython/Build/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Build/Tests/__pycache__/TestCyCache.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestCythonizeArgsParser.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestDependencies.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestInline.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestRecythonize.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Build/__init__.py,sha256=cxv1BKTFfuE10D5-MObSiFogR4dUpaQYFz-CLaHj9KU,401
|
||||
Cython/Build/__pycache__/BuildExecutable.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Cythonize.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Dependencies.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Distutils.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Inline.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/IpythonMagic.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/CodeWriter.py,sha256=Yy4_ZSzZgnZ9_FmawxqBKd4frnshp1b7GupWuU6iKl0,24546
|
||||
Cython/Compiler/AnalysedTreeTransforms.py,sha256=LuwGJWnk-scpz_9TnKw1Lq0j4yNf5vCGO_NQ-GGupas,3834
|
||||
Cython/Compiler/Annotate.py,sha256=pYe_z56bqtzhSx3hvLS1K8ML36jMjWlFeaek_drOdLs,14153
|
||||
Cython/Compiler/AutoDocTransforms.py,sha256=sOPbdvRyU9c2k382ZtgQSFtjG7Jm21n5UAXpLAGLDYE,11738
|
||||
Cython/Compiler/Buffer.py,sha256=FalM1FoOOh9t9hXBofnSP5NKM5iYDLpnpZVweis_qrM,29304
|
||||
Cython/Compiler/Builtin.py,sha256=pIh5cqcFCb2-eW5oN3rifcG0h-eV-d5oPmLm_3FB7O0,32067
|
||||
Cython/Compiler/CmdLine.py,sha256=_m_rMfIT48gl1CrqpY2h7QfRaWjyTHV3YBRBThd7bUk,12526
|
||||
Cython/Compiler/Code.cpython-311-x86_64-linux-gnu.so,sha256=TZRhA8tbib2yQ3uv6brSMl1ipEOTXzZ00xyjx_EVKjk,1342888
|
||||
Cython/Compiler/Code.pxd,sha256=sx9jxBWoCmvfm_aR9IsfnqE69iNFn_fFe5y-WMfRqEQ,3548
|
||||
Cython/Compiler/Code.py,sha256=QOtZC7V2v98BrLPZSA9gA3NcvL2vEm7mwMooUczvADI,104762
|
||||
Cython/Compiler/CodeGeneration.py,sha256=jkcx2uX07nck0UZSgysIThRuJiPbdkSeXR4Z2uzbQU8,1108
|
||||
Cython/Compiler/CythonScope.py,sha256=4GqELgqDRlQ7bl1FKb47XrcWyYFDYgVstCXHr7yQKUQ,6863
|
||||
Cython/Compiler/Dataclass.py,sha256=Awfvdai0Yn2GA6IZSYvYP4WXRgoMfT9I_HD4gHudjGk,36032
|
||||
Cython/Compiler/DebugFlags.py,sha256=5Zg9ETp0qPFEma6QMtrGUwu9Fn6NTYMBMWPI_GxFW0A,623
|
||||
Cython/Compiler/Errors.py,sha256=_RsCSRTURcZaTtZeGp1TAJiSaXQDLkVBdWv6qa49AW0,9312
|
||||
Cython/Compiler/ExprNodes.py,sha256=rvxKVAu2gxiWd6YcaTxrOB0KVN_TivcFTmV8yZ7i-0E,602024
|
||||
Cython/Compiler/FlowControl.cpython-311-x86_64-linux-gnu.so,sha256=sse5GrBzH1Zy2XbUQI81JYkbjE4dTtTUA_VtHBQcf5Q,688080
|
||||
Cython/Compiler/FlowControl.pxd,sha256=C6se5i0mW-m-wUCa7HOzcuGW4R8All8HOBsIB66p_0A,2979
|
||||
Cython/Compiler/FlowControl.py,sha256=3UrRUUR9SliUR7zf_3qATAHd-Xs5ak-bq_QvFtqR60Y,48862
|
||||
Cython/Compiler/FusedNode.cpython-311-x86_64-linux-gnu.so,sha256=AkmMt-MwoafMrisUKTjDw8AdgHUdBvmt5jusOfKraBo,517064
|
||||
Cython/Compiler/FusedNode.py,sha256=NhZwm2QjIVyHY8Nghz0BIWQn6mXtKpr8VY8PG_0E8OA,43360
|
||||
Cython/Compiler/Future.py,sha256=NFtSWCJYqPlEqWZ5Ob_bv_pDfW6iS7pPYWeGH1OGA5g,629
|
||||
Cython/Compiler/Interpreter.py,sha256=6wJEJMtz22OoVi49qUZn9ILYevb50vqjqKAteu7lh04,2114
|
||||
Cython/Compiler/Lexicon.py,sha256=LmKi6ZqoC-NZDIL86U8L4EGkWVLgHojZQNgUKECkBco,21772
|
||||
Cython/Compiler/Main.py,sha256=vv59zMvZnHI5oIkJeTxZSRLYx555TkXu_CcTVzXufkY,32205
|
||||
Cython/Compiler/MemoryView.py,sha256=yTDgrvFvjcjIURylob910mMQNTPPmXg7Gh_drqALAfg,30382
|
||||
Cython/Compiler/ModuleNode.py,sha256=uhiBxur0D9s-54PCIZRhyVoR7ASHCM4FjH2qVdtVHb0,184020
|
||||
Cython/Compiler/Naming.py,sha256=YTKzc5XKvkx8vAqDZmgl45ExnR0QSbZiiNhIev5K1jQ,8160
|
||||
Cython/Compiler/Nodes.py,sha256=fuNGOVJP6amUxJCgV7bsfulV_OTbmadmxTRRL4xPm8o,442392
|
||||
Cython/Compiler/Optimize.py,sha256=s_sPuYACU4oVACRcoAKoFecyVZ5eU9FfITHhCQahYrw,226603
|
||||
Cython/Compiler/Options.py,sha256=ci8mtRDVvl-Mtmcn2UUT40PwAZNjWQ54J4T4zrAQs10,30646
|
||||
Cython/Compiler/ParseTreeTransforms.pxd,sha256=1YVH1stZcLYswszLUBMJd4W8wPTik4MCpG4yfNeSh7Y,2583
|
||||
Cython/Compiler/ParseTreeTransforms.py,sha256=oDECNyLNwVF3wEuM03DdrLg9fnqH-nEA8Ka1O0TPVqo,171534
|
||||
Cython/Compiler/Parsing.cpython-311-x86_64-linux-gnu.so,sha256=cQpeVpAAqGl5Dkdxp3dYk1-stpw7aZroi75lufK_mKI,1170440
|
||||
Cython/Compiler/Parsing.pxd,sha256=LomEHtmDKOIroiWpJbrHEcz9-wQP3Jl7jj8PjuLVG10,9166
|
||||
Cython/Compiler/Parsing.py,sha256=kRT-rhRpEEwTk3fNABKm0uRh6is5yXH-YCc1U4YENIU,139740
|
||||
Cython/Compiler/Pipeline.py,sha256=jNfU_et2wc_HfSSY3LA633OK5FmScceK1zOZKsvlAtM,15631
|
||||
Cython/Compiler/PyrexTypes.py,sha256=V_bCmCHwOUkpidMt8APwd02ScK4wbRRHF-U5cbLuIPA,208736
|
||||
Cython/Compiler/Pythran.py,sha256=NHIml0yx0jPLyTLRAHXZr0LHTyEyfYqspgYuV4vdNKI,7267
|
||||
Cython/Compiler/Scanning.cpython-311-x86_64-linux-gnu.so,sha256=NbvXcI5h1rLUcExxOQGNPq5nvKMD2foDIotQhF9v_-Y,340320
|
||||
Cython/Compiler/Scanning.pxd,sha256=ivsDKJzsh6FggfiybEVq3NJ_b6_qIHvi8rPkDLSVTtE,2071
|
||||
Cython/Compiler/Scanning.py,sha256=w-aGbQMGXqNBckE87bFDTl1w_lHN4H4-SCwMSh-gZvI,20114
|
||||
Cython/Compiler/StringEncoding.py,sha256=wepb0J7NZ9HhahxC0DqL41fWsVl-2I6l95UL1jdYn68,11728
|
||||
Cython/Compiler/Symtab.py,sha256=o0q6PYxCE5XFsjaahIjofZfQ-dttPcK7i5vYS5kMqPU,129778
|
||||
Cython/Compiler/Tests/TestBuffer.py,sha256=SDAkH2fjCsiaAisWQcBdNX3JxG1KcEEIQJbsnNeIWuk,4156
|
||||
Cython/Compiler/Tests/TestCmdLine.py,sha256=ONYLPSMwq4kEYMje-TmQuWzEgNkWkD-RKTmvWDvxtsk,21844
|
||||
Cython/Compiler/Tests/TestFlowControl.py,sha256=ge3iqBor6xe5MLaLbOtw7ETntJnAh8EequF1aetVzMw,1848
|
||||
Cython/Compiler/Tests/TestGrammar.py,sha256=SbHXzaLnYEcN_D2q6mOIcb0Rd-6cof49LCCYjIPFY4g,5128
|
||||
Cython/Compiler/Tests/TestMemView.py,sha256=kytAp-r-DMTuviujoFcA7DhsiofOjnArA0WpJ2j3E1o,2517
|
||||
Cython/Compiler/Tests/TestParseTreeTransforms.py,sha256=XbGkHi_fqoZENjQZYp7cMReeF-zg-mpujNaw5LjvdgY,8926
|
||||
Cython/Compiler/Tests/TestScanning.py,sha256=SpVM6-4MstnJQUuvFEhXZBEy8n2oPkPguvsb9Y3kI4o,4771
|
||||
Cython/Compiler/Tests/TestSignatureMatching.py,sha256=tDlQks1mgo2MIPBW_uC5YkoZt0RjPGYAdluk1j82IvM,3342
|
||||
Cython/Compiler/Tests/TestStringEncoding.py,sha256=RL1YDXrOUe1sPLEbWmTJQ5VF-uEZ_KLz0jaeQoMx85k,2315
|
||||
Cython/Compiler/Tests/TestTreeFragment.py,sha256=0VywSuhoyluLITx0w-BQ8HYES3TQ5UW0NcGQhjX4qxk,2166
|
||||
Cython/Compiler/Tests/TestTreePath.py,sha256=VHOJU30i8GuDWQo3gUL8he0C7wKHENI5wy9t6KU-qII,4192
|
||||
Cython/Compiler/Tests/TestTypes.py,sha256=Uo1wWZPgaRbv-leWXmuoqXx_HkY9oPgBAIRI5XulF-Y,3334
|
||||
Cython/Compiler/Tests/TestUtilityLoad.py,sha256=5zuAYD_RuRW_KDl2cfA9aXCJ_G-LlDA9pIF4zbqeZlg,3923
|
||||
Cython/Compiler/Tests/TestVisitor.py,sha256=QAnBpUhnirSFKqXWiawo-OhXhxIRTQidWxEzGjJDz6M,2228
|
||||
Cython/Compiler/Tests/Utils.py,sha256=ChgJ0EeGJRc_ZkNVjZFvFzk1tOj6dxjkW6X1BBot1Hc,1065
|
||||
Cython/Compiler/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Compiler/Tests/__pycache__/TestBuffer.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestCmdLine.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestFlowControl.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestGrammar.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestMemView.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestParseTreeTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestScanning.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestSignatureMatching.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestStringEncoding.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestTreeFragment.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestTreePath.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestTypes.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestUtilityLoad.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestVisitor.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/Utils.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Compiler/TreeFragment.py,sha256=GuKBbSDCwWD77FacP9iHVvBHghBBfYrAvIgB4mxW5cE,9709
|
||||
Cython/Compiler/TreePath.py,sha256=3_lScMAd2Sly2ekZ8HO8dyZstGSruINl2MXXq9OYd2Q,7641
|
||||
Cython/Compiler/TypeInference.py,sha256=PQHeR9d8EFloyWVuVv6CNPri4G9nTNPyCQzcsfgNeBs,22725
|
||||
Cython/Compiler/TypeSlots.py,sha256=s0fmpCT07y85yDnrNZsa_DZvdfc07SiVVX-f_9hxf_g,50443
|
||||
Cython/Compiler/UFuncs.py,sha256=KNCawVbwvmUanYTqxG73AJhlIRFIJA6BjYLcKWiidiQ,9166
|
||||
Cython/Compiler/UtilNodes.py,sha256=QYfoLIIYKlbmG-IoO2-hTJDShl9pM12SAHb_G5QPwgY,12463
|
||||
Cython/Compiler/UtilityCode.py,sha256=yRSZAzXEQq4AuGR278ZFiZCXSoArylcf_cCgVsXDRnA,10952
|
||||
Cython/Compiler/Version.py,sha256=f2mS6aYYdu0DMRK3B4IuzMlCo-k-ffmehCao_vKlTdk,181
|
||||
Cython/Compiler/Visitor.cpython-311-x86_64-linux-gnu.so,sha256=hZfxmFgE9sDFW4TSmodE8OK8ZgCqppVAJJn7u8uh3sw,374848
|
||||
Cython/Compiler/Visitor.pxd,sha256=AsYd6v_MyVWzs2AVj0siG_92JGnKhxEYk89oAbhHa64,1814
|
||||
Cython/Compiler/Visitor.py,sha256=c5Xu4v2LTwhxBp_Ign24WXMThddN0OZ_7gZ53aCHOAo,31627
|
||||
Cython/Compiler/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Compiler/__pycache__/AnalysedTreeTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Annotate.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/AutoDocTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Buffer.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Builtin.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/CmdLine.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Code.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/CodeGeneration.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/CythonScope.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Dataclass.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/DebugFlags.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Errors.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/ExprNodes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/FlowControl.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/FusedNode.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Future.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Interpreter.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Lexicon.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Main.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/MemoryView.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/ModuleNode.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Naming.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Nodes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Optimize.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Options.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/ParseTreeTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Parsing.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Pipeline.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/PyrexTypes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Pythran.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Scanning.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/StringEncoding.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Symtab.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TreeFragment.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TreePath.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TypeInference.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TypeSlots.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/UFuncs.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/UtilNodes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/UtilityCode.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Version.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Visitor.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Coverage.py,sha256=prYNxj3ML6UaI_UVOhROd54GBwRgrt_Fa71vVlrEPbk,18461
|
||||
Cython/Debugger/Cygdb.py,sha256=8k5Wz09MSQdiUtCVtb0dMukKqZV7E8fvB9Yvv3rfJCI,6911
|
||||
Cython/Debugger/DebugWriter.py,sha256=OoywNqkq5IyRRAvlcaaZ05zDrJMgNgg7O6wMVdltr5k,2486
|
||||
Cython/Debugger/Tests/TestLibCython.py,sha256=T6qGlhSlrXwI5cn2OpWa82tZ51UjVm9qmmk9Mfho1p4,8455
|
||||
Cython/Debugger/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Debugger/Tests/__pycache__/TestLibCython.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/__pycache__/test_libcython_in_gdb.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/__pycache__/test_libpython_in_gdb.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/cfuncs.c,sha256=4SZurmnz5J1SiIs9N26Eu4zc2wvF_qMEKaN0eTcbDPo,71
|
||||
Cython/Debugger/Tests/codefile,sha256=axsI884lThsoLMg2vlQJ6BPG8t9vil0mTDs_Pi7vuwI,642
|
||||
Cython/Debugger/Tests/test_libcython_in_gdb.py,sha256=8JDbj3CbmQrB7cFP7yThiHlgIS4QrdvQYMLGEoHuE7A,18048
|
||||
Cython/Debugger/Tests/test_libpython_in_gdb.py,sha256=fjXO3VgT0sYcAaV2vpx2oJDJ7MsQmXTG3IOge7QnnU0,4049
|
||||
Cython/Debugger/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Debugger/__pycache__/Cygdb.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/DebugWriter.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/libcython.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/libpython.cpython-311.pyc,,
|
||||
Cython/Debugger/libcython.py,sha256=70z-fpmKeAj7Blk9FImlDXfFyNsU848IPzaWZYDITvE,46538
|
||||
Cython/Debugger/libpython.py,sha256=crnRuil2PPaXZz0wp6AZ-dabnFXzji0PcLQyMJB9e5Y,94004
|
||||
Cython/Debugging.py,sha256=vFtJhn7QstMf5gnYru2qHIz5ZjPg1KSlZVGHr-pBCwM,552
|
||||
Cython/Distutils/__init__.py,sha256=uyWaN2NJ_mKYLzVsDPi0qZCdIYoW5M_7YYEmAOIL3Ek,98
|
||||
Cython/Distutils/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Distutils/__pycache__/build_ext.cpython-311.pyc,,
|
||||
Cython/Distutils/__pycache__/extension.cpython-311.pyc,,
|
||||
Cython/Distutils/__pycache__/old_build_ext.cpython-311.pyc,,
|
||||
Cython/Distutils/build_ext.py,sha256=QCoEpEMz3NQ2s6kQtUB743zeuIFVdIz5VkADtqYbkeE,5708
|
||||
Cython/Distutils/extension.py,sha256=tnMDR7SMrWDh2tQjC7fHtptlKhYXvf0HiveMLGQjIJ0,4640
|
||||
Cython/Distutils/old_build_ext.py,sha256=jb7aERJvis6xrIJCPnRQjF2O-xjj4jYltMvDRodQAtg,13825
|
||||
Cython/Includes/cpython/__init__.pxd,sha256=imlSN5iR-xZ9Ep9tw_6WqTLaVn6vO5tXYHhVxv1u82o,8306
|
||||
Cython/Includes/cpython/array.pxd,sha256=0laAK0ujw6gtzgfqzWWmckRQe0-SQ5smhAqXKNpJCWg,6367
|
||||
Cython/Includes/cpython/bool.pxd,sha256=5RU6XY57iZ-xtngjXswJoBOxn1ClZdkcXWHQiZ2k6fg,1358
|
||||
Cython/Includes/cpython/buffer.pxd,sha256=wm7aHygGUof_H3-JyICOek_xiU6Oks178ark1Nfk-a0,4870
|
||||
Cython/Includes/cpython/bytearray.pxd,sha256=m0VdoHgouF1T0VtRjFLXZ5fi22vaMdVwFWpF3IxB6m4,1443
|
||||
Cython/Includes/cpython/bytes.pxd,sha256=OH9krgA2CLdcNJWOM0PpgMskbh5vnq6fjjj1lzYOhOU,10066
|
||||
Cython/Includes/cpython/cellobject.pxd,sha256=DXdTjSN1RP1m4CsaGuggyIA1nGiIO4Kr7-c0ZWfrpRo,1390
|
||||
Cython/Includes/cpython/ceval.pxd,sha256=h6fBetZCUvWTcCn3bkXZg2kqnIuyC5ZSChyhOocxVus,236
|
||||
Cython/Includes/cpython/cobject.pxd,sha256=ZeMdbpZLqpcTywdv2VoppMTWD4X_yghL6Qox7LVfOyg,1524
|
||||
Cython/Includes/cpython/codecs.pxd,sha256=3fyudEljkNGQ7e3dJPst6udXGcAeNKvlMK9U8EB1gXc,5084
|
||||
Cython/Includes/cpython/complex.pxd,sha256=B_ondPAPNM7nSJtgMPKZgWxFHldPxBqG63spwK4t9_Y,1842
|
||||
Cython/Includes/cpython/contextvars.pxd,sha256=HoNxGtIIZOLEuSclvOizjkSAwhLtOZWzg8j8YZa_RT8,5731
|
||||
Cython/Includes/cpython/conversion.pxd,sha256=dbbFuZJF0SscmcaNCUf0tlBQDRdKYf5tH8yzhTU_XYI,1696
|
||||
Cython/Includes/cpython/datetime.pxd,sha256=uoyukvbgigbAVXMVUFd6pwvUViAUHfZvuNPiA0nqETU,15793
|
||||
Cython/Includes/cpython/descr.pxd,sha256=RPSPJUxyejKsWruYS3IWU1rg0L1pKFAYidYcXW9YAj0,728
|
||||
Cython/Includes/cpython/dict.pxd,sha256=U1FHJRnYf2GZcVReNEATzeOa8s5PlAD539MQigbOASc,7939
|
||||
Cython/Includes/cpython/exc.pxd,sha256=0pI7VcDnMLqf-S_BClRgoiH2xGyDbhlmFGWOKcn3sGM,13830
|
||||
Cython/Includes/cpython/fileobject.pxd,sha256=yQG3M9wfS2jwpgSTo-8oXx8K9xnpGIkL-etQt9YDwTU,2889
|
||||
Cython/Includes/cpython/float.pxd,sha256=Gmf5SzLRCZOMd-deD35PgOlRGsrcwhQw__E4igmqKdc,1650
|
||||
Cython/Includes/cpython/function.pxd,sha256=IoJUprbz8F10DEKh-vSSpY6nWkCHw7SqG9p2f-4gHek,2671
|
||||
Cython/Includes/cpython/genobject.pxd,sha256=emC1JPgkuvBbGC0rgeZapKDaXYEj48uWiDC-xF0Mx2I,1052
|
||||
Cython/Includes/cpython/getargs.pxd,sha256=268twKzdiAkQMXMsetNiNlNqaqzlKtiBENKbhOHd8x4,775
|
||||
Cython/Includes/cpython/instance.pxd,sha256=qCbxPeHKOJbuszDu3UEaI-KLX9lTopuaNCcpoHJ9ngU,985
|
||||
Cython/Includes/cpython/int.pxd,sha256=d9a0zUw_M3pRycCESWIjtfXWRvdvFOWxjdOjkcbX2gs,4131
|
||||
Cython/Includes/cpython/iterator.pxd,sha256=o52mLHbdm14Kqant2hR2zAdYzqK4fkSWZtBcRmpoP-I,1319
|
||||
Cython/Includes/cpython/iterobject.pxd,sha256=5UEZZwG5zyzxoCpknoQuh91zPUV11Uxr6F1taJdTv8k,1036
|
||||
Cython/Includes/cpython/list.pxd,sha256=HhnwchBGhPIAoObzIXyg33KqvSxBRveWoq34iZM508s,4096
|
||||
Cython/Includes/cpython/long.pxd,sha256=1gN-O5AcV4B_r974qxW9YDr7NedDyDrTRjOelClvoyA,7047
|
||||
Cython/Includes/cpython/longintrepr.pxd,sha256=czvKr3fQdYIwIRu3gojXssT9LFXH-nstM7f_lPt7lE4,480
|
||||
Cython/Includes/cpython/mapping.pxd,sha256=DI5_kOp78IaYx77qIWpetu13iMEgGXZew84mTsCPYtM,2692
|
||||
Cython/Includes/cpython/marshal.pxd,sha256=-Tl2w_7VfgzrCSq1gpBIEZRADw1g1zZNMdPXz4YJClE,2897
|
||||
Cython/Includes/cpython/mem.pxd,sha256=O8I4rWJj7VvrlYjPpR-Dhls5izYddgO5rNyAOAzWUQQ,5912
|
||||
Cython/Includes/cpython/memoryview.pxd,sha256=l97J5-hbH3hp9aMbdXp3n73hJFNNsng6uyh40pc8P7I,2504
|
||||
Cython/Includes/cpython/method.pxd,sha256=UWXflhIlP4y7B5XDbH9rQ15iADciGW-iqV1-dlw2Wwg,2196
|
||||
Cython/Includes/cpython/module.pxd,sha256=ahRxpmkz_KMZhnSk-ZrXn_kkSoUhNBxWXf9uPquXyis,10128
|
||||
Cython/Includes/cpython/number.pxd,sha256=tYJ0nn0k_llUx3ilniW9iXd2rKVejA-J5UUiIJ36Kww,11922
|
||||
Cython/Includes/cpython/object.pxd,sha256=1mMnUhoxDfCg7iCjQHvf4lsagJLZw9H0EhsLy4NggyM,20003
|
||||
Cython/Includes/cpython/oldbuffer.pxd,sha256=v0-YZ_Iwwj3ZQdM8VE5NPTQcbBlJdWwJGtNO9DonGgw,2916
|
||||
Cython/Includes/cpython/pycapsule.pxd,sha256=Z3-xhfFRIldr-SqznNaE5J0N0jlUvoa-I5sGTHzWTGg,5700
|
||||
Cython/Includes/cpython/pylifecycle.pxd,sha256=LziJZHclGdtsr3yT28fULHNZ_n67bs1DmI9s8YzrBGg,2000
|
||||
Cython/Includes/cpython/pyport.pxd,sha256=MfWCwvbMjd_qBvpmj5DuNUqGnTnLLEIx9pb8B1-dz_Y,222
|
||||
Cython/Includes/cpython/pystate.pxd,sha256=TQb-_El6K7h6ktpFkgfehi1VBe4KcUNscH7TG7Nv8W4,3779
|
||||
Cython/Includes/cpython/pythread.pxd,sha256=0375TaYmtNCDDkWBh9WY4oJ_jhoTxhu_RR5QiOsXmYg,1946
|
||||
Cython/Includes/cpython/ref.pxd,sha256=awtAD2o36UaGQbnDHVdOyH8J2Iiclo7D4Bj7XFJbaA0,2556
|
||||
Cython/Includes/cpython/sequence.pxd,sha256=UajXW6S_ssyCmYDDsXFiHGR9IUDMP3f6AuV4bBzh2Do,6006
|
||||
Cython/Includes/cpython/set.pxd,sha256=ewHRPVMbHUGDInZ3NziisCq68LvtmEJ-SXFbzmuJxLc,5383
|
||||
Cython/Includes/cpython/slice.pxd,sha256=Rzgn8diAsN7lS2xGTq4VZucV3ziFNra4oz4tKGEAkMo,3111
|
||||
Cython/Includes/cpython/string.pxd,sha256=6jHMVltzGLTLgmo_ndti22mTuQmNVyk9J9S48eyPbEo,9942
|
||||
Cython/Includes/cpython/time.pxd,sha256=rdv6S1n2jDiymQDTnf5dxjdgab5ia0SiO0R2cKjkaTk,2411
|
||||
Cython/Includes/cpython/tuple.pxd,sha256=DUUhJp4v23g0JOJ6OK3sGvHNgEz97Z9be8XBgZrqH0Y,3219
|
||||
Cython/Includes/cpython/type.pxd,sha256=qt8Hqz3DKGJuMgWJgP2JuCpUHiySYp8KCJTerJ4gnpI,2067
|
||||
Cython/Includes/cpython/unicode.pxd,sha256=68cguuI3cMJbspbqwRPuzmpjJfOsZe_IV10JvhMd-FQ,30635
|
||||
Cython/Includes/cpython/version.pxd,sha256=l5KXt04isEv3qbGRJZ8fNlCYGO24HsA2l4EM3RxTEhE,847
|
||||
Cython/Includes/cpython/weakref.pxd,sha256=UU9H_ovHG07FFgP_kY2xhGv3yJDr_8iujCZnxH2jnlo,1984
|
||||
Cython/Includes/libc/__init__.pxd,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Includes/libc/complex.pxd,sha256=m2ntA8NFZQG02UuY5YDGu-MrW-Avp0kSY82mhkR1Q8M,1224
|
||||
Cython/Includes/libc/errno.pxd,sha256=tt0CJaCDWZN4HGtzC5nP7D-hT-jjoYD9m0FOPizmRvc,2049
|
||||
Cython/Includes/libc/float.pxd,sha256=IhvZJljpTG0fZtcIp7EBO2Sqddozxoxwj4RFNVcKLpY,966
|
||||
Cython/Includes/libc/limits.pxd,sha256=xHlIyuDIKpjqclvRRYzZIcfd5G1re5QtbmoDMqZR_Ec,621
|
||||
Cython/Includes/libc/locale.pxd,sha256=sixG8EJ6wiVb0HIR1LWJ3lXTjTv463GJ9C_40HRovN4,1140
|
||||
Cython/Includes/libc/math.pxd,sha256=Hy0ewq4Xw2sWPvrokbrbpHw6r6azx8C1nRsNWtuMhUs,6581
|
||||
Cython/Includes/libc/setjmp.pxd,sha256=XRh-gSuhvFLl0nRvz5OhSWYe9eqX2attAck3JI7mwa4,297
|
||||
Cython/Includes/libc/signal.pxd,sha256=RmJeCLtWUfYFTtwiocZSV-gJtJrxFijkTYOZnvOk9Pw,1179
|
||||
Cython/Includes/libc/stddef.pxd,sha256=0rCyoocCfDL-1OQo3pxHQ-6fW20SAYktOLPoa4d97w8,164
|
||||
Cython/Includes/libc/stdint.pxd,sha256=qHJXzpWCrbvJWSaHYZL27VJPupQreTZl9VGj0jgLdRU,3449
|
||||
Cython/Includes/libc/stdio.pxd,sha256=qUaxEwNrQl1-4yHLorzzJZ-a-y5_-Rm_m7Z5meaRqH0,2476
|
||||
Cython/Includes/libc/stdlib.pxd,sha256=p62xq2XfB24WfNCjRXgD6cOYoRuV47AnYijkjWv4ugE,2444
|
||||
Cython/Includes/libc/string.pxd,sha256=tzYGbRrnccedFLes-KGgJqM0FEtwHF_q4f2fqltNvyE,2038
|
||||
Cython/Includes/libc/time.pxd,sha256=zeE7saukFU9k77SXjUlIJ2GWka-LdXCFVwinfL4sQx0,1354
|
||||
Cython/Includes/libcpp/__init__.pxd,sha256=PCx8ZRfOeoyMRu41PPlPY9uo2kZmt_7d0KR4Epzfe7c,94
|
||||
Cython/Includes/libcpp/algorithm.pxd,sha256=HaatOKA2pIHc-RNHCIWayPXLT2Hd56Q0gKC5kLlCYYc,23704
|
||||
Cython/Includes/libcpp/any.pxd,sha256=0HE8j4XF0bkCrxaYWE3DM1kV_2LhljH8WKh2ariwIWc,425
|
||||
Cython/Includes/libcpp/atomic.pxd,sha256=BDFpDe8SmSdiDkEUfzbh55hjkY8yCUVDyeeUcMOwiy8,1705
|
||||
Cython/Includes/libcpp/bit.pxd,sha256=Wd_4EoENOPZeqqhd0s--6TCOzeqPpUFfGNQibsqV9Ig,749
|
||||
Cython/Includes/libcpp/cast.pxd,sha256=En4LBubdinfpm9Rel077tK_LGwg_3k4FAu9mlIbKjuw,501
|
||||
Cython/Includes/libcpp/cmath.pxd,sha256=-_jnjIWY47jybkNnGrMk8ewZeGaWU0ezMWAZm9UCRk0,19935
|
||||
Cython/Includes/libcpp/complex.pxd,sha256=JtuQuknvS6GQ0FfyJGQ914DXvzNEaF1-z9D0jST6gXM,2995
|
||||
Cython/Includes/libcpp/deque.pxd,sha256=SwgYrnqq6OMQMSOEFTZpnpRsii7pIAT-06bLxMS5w7M,6718
|
||||
Cython/Includes/libcpp/execution.pxd,sha256=I2KizUy9DGm_0edrd2BFdHPwyeip2ZcDxqdwb0t7taI,515
|
||||
Cython/Includes/libcpp/forward_list.pxd,sha256=o2ThwKyJWZrNT4ZMB1aYmf-2wqYiqSCDdfVswpo8S8I,2429
|
||||
Cython/Includes/libcpp/functional.pxd,sha256=kMul7WB1J0X2-611AMtXq6sP9jYYk3YO8zZoDKFaeDU,722
|
||||
Cython/Includes/libcpp/iterator.pxd,sha256=UjkDqqKq6pHLiwgdUY730PbzAiTKKlhak6gkVd3jtsk,1512
|
||||
Cython/Includes/libcpp/limits.pxd,sha256=BWJzVBB8MZt3l9PUre1o5eScE2fGJa3_Sv6e_KH30Uw,1821
|
||||
Cython/Includes/libcpp/list.pxd,sha256=iOovgIk_Slkf7yaDEv6-ZUss_AU98OGWkvgNQDF0K0A,4438
|
||||
Cython/Includes/libcpp/map.pxd,sha256=C8EaEsvLEc2tmEkyybOzgkx3CoFWYFBpZelHhcKHI1s,10481
|
||||
Cython/Includes/libcpp/memory.pxd,sha256=OqNDPX_1ps9bxWCEQDiefbQv-NeZJ7SNUQtdYB86MZs,3593
|
||||
Cython/Includes/libcpp/numbers.pxd,sha256=SkBhbClhRTtzbSMj_QvR2pz-CjdB08ZXPJbXSwATzvw,395
|
||||
Cython/Includes/libcpp/numeric.pxd,sha256=dTdOmLBD5X5VGeTdYQqA_AXgJOr5f51_-rR7umo-w0Y,6571
|
||||
Cython/Includes/libcpp/optional.pxd,sha256=Mf5gnZIvB9IR-L7bi3ntog2EOXB-pp1Xo45CWqyRCiU,990
|
||||
Cython/Includes/libcpp/pair.pxd,sha256=UBJXw43uHkDlNsr0Pu1aP5tZ-ILXhUAyOLam2qdWmZA,27
|
||||
Cython/Includes/libcpp/queue.pxd,sha256=FbL4Q7C3lgtZ2YzictU1XBXzQ7G-6y9i_7l2eqzA3Xc,649
|
||||
Cython/Includes/libcpp/random.pxd,sha256=jgjjSbPvJturdi1NhYclH6NQRnDF3CiCbuPKgtrQ2lc,6203
|
||||
Cython/Includes/libcpp/set.pxd,sha256=IAEHB3ElvGIm9AX8fWoO9db1jpz6eVXLDgMgOcoHhcY,9176
|
||||
Cython/Includes/libcpp/stack.pxd,sha256=hCU6nVpHHkKhlzREnw4cSi64atGu9pWeuorFSZtEoh4,301
|
||||
Cython/Includes/libcpp/string.pxd,sha256=gT_1KTVrAt-0rnWT5S97GJhPChKpk-syeqa_Mj5L9wU,13840
|
||||
Cython/Includes/libcpp/typeindex.pxd,sha256=mIHr5Mq6Lol0SlzqeK6w_giVERh3uAjZm78yPDLXzc4,524
|
||||
Cython/Includes/libcpp/typeinfo.pxd,sha256=tITsqurrdaZjsEGFksem9xZtVhSxQRxHZxcoC-4Y-DY,304
|
||||
Cython/Includes/libcpp/unordered_map.pxd,sha256=dHnTuJ1S3ja7OYGRW-hZ1Zh_xDpv3iW6JUxs9Um_K4U,7945
|
||||
Cython/Includes/libcpp/unordered_set.pxd,sha256=yfmib2EnFDGn_RvlxCFkVy-eVapwQw2FvTHu-I5psTU,5810
|
||||
Cython/Includes/libcpp/utility.pxd,sha256=hTbvp7c12pnU2yvzzMvflZB-MAc_--3xh3PXtD_VIwg,1040
|
||||
Cython/Includes/libcpp/vector.pxd,sha256=lLjXSgOThX23KA6Suuyf0FrAY7kDiTs8xFJi5xKzetk,6839
|
||||
Cython/Includes/numpy/__init__.pxd,sha256=JbZ4wJihimLMlU6P4bT2iYy4E_hKrNenab-GC8h91QI,36457
|
||||
Cython/Includes/numpy/math.pxd,sha256=qZEdamaPgCFW4J7Itc6BWgOrQSKZdxDT6kbU_gqx2g4,5807
|
||||
Cython/Includes/openmp.pxd,sha256=3GTRd5JH31CvfTzXErglXnyf_jye1Gvk9O4giTa6pc0,1712
|
||||
Cython/Includes/posix/__init__.pxd,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Includes/posix/dlfcn.pxd,sha256=U-jAieh45NSlrlogsd6SJeCunYDxCG-AlQ7hpEXQgL4,356
|
||||
Cython/Includes/posix/fcntl.pxd,sha256=s0Qj0-T7Luzk0dJH4Jn_U54Suzf1LrfKDlFp7qg_nfM,1697
|
||||
Cython/Includes/posix/ioctl.pxd,sha256=2RC5zejPOCTkarDZM_6Vd2wc4oBuN7iaiL_C5MPBs90,99
|
||||
Cython/Includes/posix/mman.pxd,sha256=jeRRW5YRK4o2cLx5M98Ce--CP7GGZWVyy3ylW2mP6nU,3475
|
||||
Cython/Includes/posix/resource.pxd,sha256=_oeWwy1HOQ-PUAxfnM1Ha7jnSIi2uUosAaNaQmqUmsk,1338
|
||||
Cython/Includes/posix/select.pxd,sha256=cF6U60K7hYUzQuY8udA8VF50vTC7xxau1eynXrADzAU,619
|
||||
Cython/Includes/posix/signal.pxd,sha256=wFJI5UthdtU9mZWjEBeZ9IIfeX252JVwDk2tsbW_q3U,1876
|
||||
Cython/Includes/posix/stat.pxd,sha256=5sHZ4Ira3nVeNDynsM-7aEcJu7DfC07d_aTwlcUhC0Q,2695
|
||||
Cython/Includes/posix/stdio.pxd,sha256=nDxLG4Qdq2v9zLb-bfxphv9oCvCD5QenT2POqSX7Sww,1055
|
||||
Cython/Includes/posix/stdlib.pxd,sha256=G5Miv-QwID6Te9BQsz2vlRyKTpmvtuuYdwOUX4RxRoM,935
|
||||
Cython/Includes/posix/strings.pxd,sha256=GNEteqND2wgXXSvkv6U9eKSC9oIom3C7o2zQ6W_J_S4,374
|
||||
Cython/Includes/posix/time.pxd,sha256=lX06ykHd1qZsrw9ziKLpsGNdoN03PURRfrEngOMRBcs,1981
|
||||
Cython/Includes/posix/types.pxd,sha256=tWEWxST4EGHIgYS-Ce2SGjZ-KgmM2SVe1eggdcgv3JQ,1162
|
||||
Cython/Includes/posix/uio.pxd,sha256=lsHOhduB-LgUwWz8uMYlenGa29gtfc2B_K8Jjw7_8OY,822
|
||||
Cython/Includes/posix/unistd.pxd,sha256=w9B4d9NaXBsQ62XOr2xe9UFPGewmEk5BG6sqiRWdoM8,8061
|
||||
Cython/Includes/posix/wait.pxd,sha256=8bQAm7_cADrhT9ZY8-HZUn6dbIeIvEkuy-ZYmaSYQMg,1246
|
||||
Cython/Plex/Actions.cpython-311-x86_64-linux-gnu.so,sha256=yqfhf3eac_YWxsZ0aVb15SxJbOIZC1KtRMbbLP4lSKQ,80704
|
||||
Cython/Plex/Actions.pxd,sha256=AQbTp0D_OPz2nVkcaPiumYEFfaQacOVJCGorHctKHcM,581
|
||||
Cython/Plex/Actions.py,sha256=eGPLzGxlQ4ncVaaUaQk-Re2N6SEDYk7DFq3KFVZXVjo,2919
|
||||
Cython/Plex/DFA.cpython-311-x86_64-linux-gnu.so,sha256=hCvqfOxNG6864pvvrLILQP6JhdfeuvyAN2rJFjWVASs,128360
|
||||
Cython/Plex/DFA.pxd,sha256=ZU4_46Flh_0QN9eQFgIdVXuGo-iuFTQHFil97o_Ege0,776
|
||||
Cython/Plex/DFA.py,sha256=HRTKO0V2gWmQ-8ch9u38NXXI9gVJ3HRwg5W2owWWkwE,5427
|
||||
Cython/Plex/Errors.py,sha256=UsCwtNpxD2_BfStOsYfb1gMeU0xe5a_8yUDd7WKf7cc,976
|
||||
Cython/Plex/Lexicons.py,sha256=D8QeBOxAM06Zox9gn_Dxnk7JNPFI1_F4Wb2kb8nFKKI,5946
|
||||
Cython/Plex/Machines.cpython-311-x86_64-linux-gnu.so,sha256=W9tKGFkvybV2cuVHoLvgEXdrQeo81ei_gXLp5yQj8gY,176680
|
||||
Cython/Plex/Machines.pxd,sha256=PmaVCp9oQorpzUgpTQAPMwPlXTVjHfln73arpOthKLY,732
|
||||
Cython/Plex/Machines.py,sha256=MlbIyguDBOrZ9qaTXHUcEB8T664y4KKLG0V1iLNINv8,7684
|
||||
Cython/Plex/Regexps.py,sha256=Gei0aVp0eM-KJWNLqvgOj9thOTw6Swy-aRlx-gzVbmA,14957
|
||||
Cython/Plex/Scanners.cpython-311-x86_64-linux-gnu.so,sha256=L-956RiR4JNYm1zYktPXy3gJ3zf3CSvKbCe5sjU8G4o,123048
|
||||
Cython/Plex/Scanners.pxd,sha256=oiTi45TeGNyaGN09oX8y21HtEfgcnlH_iHMahlqAcPs,1552
|
||||
Cython/Plex/Scanners.py,sha256=GsW6Ow3mOW43aZGNImZ8h_j5sBFCMSX4cI6YokCYOBo,12939
|
||||
Cython/Plex/Transitions.cpython-311-x86_64-linux-gnu.so,sha256=GSRS8Trc2W4r55CyH9MLWTpBqblpdUSZ47Gf9flNR4Y,138408
|
||||
Cython/Plex/Transitions.pxd,sha256=Ewpk1zxElJtYDEDQPFkGRDh-KAFfwbbUi0sd2gLsTuU,590
|
||||
Cython/Plex/Transitions.py,sha256=qrlew4MHJytnuG0OCxOBmUiVPCm-8egSlRJg1vsN20w,6761
|
||||
Cython/Plex/__init__.py,sha256=HUAqdIcdoDbQWsTwPl0-cM5rclQCecGv2Ysz_3dcBRo,1155
|
||||
Cython/Plex/__pycache__/Actions.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/DFA.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Errors.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Lexicons.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Machines.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Regexps.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Scanners.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Transitions.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Runtime/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Runtime/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Runtime/refnanny.cpython-311-x86_64-linux-gnu.so,sha256=kNrjPiYL_v9YUJ40an7SwA-Znv2xnw2myvowLSZ-QNQ,93912
|
||||
Cython/Runtime/refnanny.pyx,sha256=K09EBbEVRgelbCnYDwlxwm3Fmuxsk8c4NW8xXz6s5hk,6611
|
||||
Cython/Shadow.py,sha256=jNFqWxRBGPdXraiqlzeF4-sH4tNvfXy7MD58LBHP3b0,17278
|
||||
Cython/Shadow.pyi,sha256=X7Y2fTL2wuz9Xb6I6pBQc3GQ1bQ6pa9gM4--hbz3qmo,2697
|
||||
Cython/StringIOTree.cpython-311-x86_64-linux-gnu.so,sha256=sFK6scoeZtYfYPZHt_8kLeg3tOnFyKeOvTvvv-mZOTo,89896
|
||||
Cython/StringIOTree.py,sha256=AB7jPmSEZXutd7v_JbDaTgg93vQEasP52X-QBtRPZ_4,5737
|
||||
Cython/Tempita/__init__.py,sha256=YHujYHiLoYUwFNNswJCgzSrDuie3sV08JsWT9Nbmp78,152
|
||||
Cython/Tempita/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Tempita/__pycache__/_looper.cpython-311.pyc,,
|
||||
Cython/Tempita/__pycache__/_tempita.cpython-311.pyc,,
|
||||
Cython/Tempita/__pycache__/compat3.cpython-311.pyc,,
|
||||
Cython/Tempita/_looper.py,sha256=jlStYhz9Pgp6NatX86k-netBNBmvwaeWxCRS_S8vcIM,4168
|
||||
Cython/Tempita/_tempita.cpython-311-x86_64-linux-gnu.so,sha256=BHfyCw36GfedxISkqSwLpK4gTQdK0XFFLh196LvU1s8,595904
|
||||
Cython/Tempita/_tempita.py,sha256=TyzL8e2Dpj3HTo7NsxSYN2LdvRShV6o9oEujNe6x8tU,37650
|
||||
Cython/Tempita/compat3.py,sha256=cjW1y266vRF5Xvh8kAu7_qHGT8AGGu2kGSJRK6DI-0E,903
|
||||
Cython/TestUtils.py,sha256=F_EvSitHy43ZNvZwZUy2JYJlHA5nMWGDlsEvjLahpeo,14703
|
||||
Cython/Tests/TestCodeWriter.py,sha256=ZbhNJEzEyxnl5w4dPw_8na6bpzSaeuZtYjEUecj4Ueo,3799
|
||||
Cython/Tests/TestCythonUtils.py,sha256=5wwdRgB3NgMvPoqMetIwRcddH0WgPrPI4bJq2pwEKQk,6825
|
||||
Cython/Tests/TestJediTyper.py,sha256=ppWoB_kWprErFZoTQctja1yBMia8jWdBbRaN5zhkYRM,7021
|
||||
Cython/Tests/TestShadow.py,sha256=MzvslvgEhJQDExPZBLSAClvJk4gDgfF3vafVx2VukkM,3384
|
||||
Cython/Tests/TestStringIOTree.py,sha256=vTuu3z32WTcmJaf0fBq62NMghYtaPL2rRnfdl2WM--4,1946
|
||||
Cython/Tests/TestTestUtils.py,sha256=6GJdzk66ewwC-ds132IzcmAhLWr8Oc9Ae18gFccwdxY,2966
|
||||
Cython/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Tests/__pycache__/TestCodeWriter.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestCythonUtils.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestJediTyper.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestShadow.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestStringIOTree.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestTestUtils.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/xmlrunner.cpython-311.pyc,,
|
||||
Cython/Tests/xmlrunner.py,sha256=9RrsdLNoOwd5nbqwxE_dWugLJ4RYdbP2t-0prJVpODM,14777
|
||||
Cython/Utility/AsyncGen.c,sha256=L8bb7P7lBEjfxURalqug0o4Soljs58FSlDbpRPMh1bQ,48376
|
||||
Cython/Utility/Buffer.c,sha256=JzuEsbFEMziYhHd2z-teXGwztKq2Lxioy1YKbbcSgrs,29910
|
||||
Cython/Utility/Builtins.c,sha256=ER4FCul2WdtlTRhFSqrdjI9gOzpf1S2CN0YIeZFq_w0,19205
|
||||
Cython/Utility/CConvert.pyx,sha256=EIC2CoisktkTpi_68-SF5eyftd6356t8W1c6dGQfx28,4419
|
||||
Cython/Utility/CMath.c,sha256=GIc7gd2WzaZryDJM3tefqXifLJpUJs6_T_c_mFrr-s8,2566
|
||||
Cython/Utility/CommonStructures.c,sha256=02ZcCAT2bXhZm2HJ1AIdx4fK0-TALfcI6GavxMVnDk4,4690
|
||||
Cython/Utility/Complex.c,sha256=91iLlSVz9zZ7fPiSPdRoKddG_yEQJXOt3eBd6dAbjXo,13609
|
||||
Cython/Utility/Coroutine.c,sha256=ofL1T1lkWx259qNTpPz5B_1AAmQOiejpfurJV-W11ko,99284
|
||||
Cython/Utility/CpdefEnums.pyx,sha256=R96TnoDtZucC9qvWmbQn3kuhxggugUQP5Pp5Vgmhwp4,6040
|
||||
Cython/Utility/CppConvert.pyx,sha256=tolYphcqNtj1KfxJLz1q76Js-JN-EMeuCBuN1N0N-ZU,7054
|
||||
Cython/Utility/CppSupport.cpp,sha256=B-nq6TcgMFFWfnZL2K7UhE_b0lZyjsuI6c-yQ9tzsv8,4934
|
||||
Cython/Utility/CythonFunction.c,sha256=mQdzqyxYMxKV-wPIUFPsyPDVIkyqkbKDC6riUShoay4,64260
|
||||
Cython/Utility/Dataclasses.c,sha256=Yky76ur34ZYi8mijd1ceC0Jc06B9aPEYBUu-LS5VSs0,7271
|
||||
Cython/Utility/Dataclasses.py,sha256=3lSw4xyskxbh6fyHKkYD_98nVWjZRpMQ5jnJQDtE0A0,4077
|
||||
Cython/Utility/Embed.c,sha256=JbXpWihTK9bq9rBhWMyaImZu8eTUvrrU6cWY-LZ8wA8,7436
|
||||
Cython/Utility/Exceptions.c,sha256=oidvxTU8mYMz9JMl7-ZZgrikCyIKLc8Gl5ovJu024rw,38955
|
||||
Cython/Utility/ExtensionTypes.c,sha256=t7ikHM9WDO8h8_WcCv8f_glW3mywGEylM96JFVSCtKM,25417
|
||||
Cython/Utility/FunctionArguments.c,sha256=nw4Cb-n8Mk4XrJhEj__iIjxT3EtA_XSnPHmlGAP6ukY,20740
|
||||
Cython/Utility/ImportExport.c,sha256=YeFwS6w-by14oI2KG17u04qz0yQLiNUUFmeRmsWnSYI,29972
|
||||
Cython/Utility/MemoryView.pyx,sha256=bMO2DFBecbQiO07engC4n2iUTI3RvAhR7c0lTa73b94,49594
|
||||
Cython/Utility/MemoryView_C.c,sha256=kEZ87RvCQcqTxcaWcPUVn4WgDJsDbCla1HqEbf8NFDE,31647
|
||||
Cython/Utility/ModuleSetupCode.c,sha256=fjXvPH6bwfyyRHeZzeCFxFeYryeFscW2h9BSVlCfzk8,86132
|
||||
Cython/Utility/NumpyImportArray.c,sha256=Gwo493DF8JxUxhTTjJYIdyHHJ9TEwFKqDmKsdk_uPyw,2033
|
||||
Cython/Utility/ObjectHandling.c,sha256=UTQYetFVANoYfOLAGc3IyicfkSMy3Vm6KtjJd-ajLds,116556
|
||||
Cython/Utility/Optimize.c,sha256=TpJPminM-vsQdslaVSu65LhaCnOyCMIcyj9hSTpTu8A,61029
|
||||
Cython/Utility/Overflow.c,sha256=1kwFjE2a3mdCG9gAlamP16tBjuNWFhs8s7UyHAQ2JkY,15874
|
||||
Cython/Utility/Printing.c,sha256=o8XnfjNIT8Ub5KY4FAp_FNw-OE3xqjy0MgmYWgDcWao,5103
|
||||
Cython/Utility/Profile.c,sha256=ZAO6vlT0FiQ5eXWl_JlmvQcFvrFiUov-RGLP3Gl-d78,18194
|
||||
Cython/Utility/StringTools.c,sha256=YaxeLKr6Dtu-HGJHk2tUSTolQfboMomsG0lb5rqG79A,45790
|
||||
Cython/Utility/TestCyUtilityLoader.pyx,sha256=91lWWJub7l_6xNn3ncrvQZZ94RpkQzEx2NtAaFpvrxY,152
|
||||
Cython/Utility/TestCythonScope.pyx,sha256=oE9x2UaH2KoyheZxKlRCCphtW8R4esePuJo4LoGh1Nc,1795
|
||||
Cython/Utility/TestUtilityLoader.c,sha256=dGy6ZWL2kBqtmUY7kF75UEox5kadQZ__BmZKscwg2aY,279
|
||||
Cython/Utility/TypeConversion.c,sha256=8OlHSjFUIAAeWQTP2FBhc5KBYXUCy9uXnpYblFxPAoA,47910
|
||||
Cython/Utility/UFuncs.pyx,sha256=dF32cppwl4Lelmpa7COgmRh1Vv4GD4uLhHG15g4E6gQ,2179
|
||||
Cython/Utility/UFuncs_C.c,sha256=BqiTcx1h5iINFAd3IubUHNEi8u_A1O_E-BOAf2J27d0,1519
|
||||
Cython/Utility/__init__.py,sha256=t2bpY-TYSX8lJdbKuBFJ1kBfpWVzgGw4xoZlCKfyj_s,1159
|
||||
Cython/Utility/__pycache__/Dataclasses.cpython-311.pyc,,
|
||||
Cython/Utility/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Utility/arrayarray.h,sha256=3Ll8Gd_S4rv8HaTfg5i6-aaoB9taI1vzwTp7NeA7Wy0,4089
|
||||
Cython/Utils.cpython-311-x86_64-linux-gnu.so,sha256=86qw6F2IwaGLvptyDxJsEVwioWuR7oAG29HW-l3gmes,396760
|
||||
Cython/Utils.py,sha256=_YpSLV29bS00oQLGqmTkgRxNXfFKAH1kyUtiQ7bO92I,22079
|
||||
Cython/__init__.py,sha256=GMnkoIas6hfN_meqZAJF9BEs1NuY4-4B2L0Uls7hXaA,358
|
||||
Cython/__pycache__/CodeWriter.cpython-311.pyc,,
|
||||
Cython/__pycache__/Coverage.cpython-311.pyc,,
|
||||
Cython/__pycache__/Debugging.cpython-311.pyc,,
|
||||
Cython/__pycache__/Shadow.cpython-311.pyc,,
|
||||
Cython/__pycache__/StringIOTree.cpython-311.pyc,,
|
||||
Cython/__pycache__/TestUtils.cpython-311.pyc,,
|
||||
Cython/__pycache__/Utils.cpython-311.pyc,,
|
||||
Cython/__pycache__/__init__.cpython-311.pyc,,
|
||||
__pycache__/cython.cpython-311.pyc,,
|
||||
cython.py,sha256=z2AtgHBGh0x0h0ZcGje7IhYlR6nGH_MmOh1fFMjqYn0,520
|
||||
pyximport/__init__.py,sha256=9hOyKolFtOerPiVEyktKrT1VtzbGexq9UmORzo52iHI,79
|
||||
pyximport/__pycache__/__init__.cpython-311.pyc,,
|
||||
pyximport/__pycache__/_pyximport2.cpython-311.pyc,,
|
||||
pyximport/__pycache__/_pyximport3.cpython-311.pyc,,
|
||||
pyximport/__pycache__/pyxbuild.cpython-311.pyc,,
|
||||
pyximport/__pycache__/pyximport.cpython-311.pyc,,
|
||||
pyximport/_pyximport2.py,sha256=mklesc9aVvPvUqWHXZhxlHF4rVrrirz1FFqJ30_x5IU,24364
|
||||
pyximport/_pyximport3.py,sha256=dGzBogaWn9eIWWyTZ9Un1jELxh8BdmSe3Fbxc3teX4M,18380
|
||||
pyximport/pyxbuild.py,sha256=AsL1tyLxG61Mj7Ah-DxtDBuaXF94W2Tb6KTos7r0w8I,5702
|
||||
pyximport/pyximport.py,sha256=23hjqx86b50J1MLmDBWBu_ESWLi1V7CoBzUYOKJi5oI,321
|
||||
@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (72.1.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp311-cp311-manylinux_2_17_x86_64
|
||||
Tag: cp311-cp311-manylinux2014_x86_64
|
||||
|
||||
@ -0,0 +1,4 @@
|
||||
[console_scripts]
|
||||
cygdb = Cython.Debugger.Cygdb:main
|
||||
cython = Cython.Compiler.Main:setuptools_main
|
||||
cythonize = Cython.Build.Cythonize:main
|
||||
@ -0,0 +1,3 @@
|
||||
Cython
|
||||
cython
|
||||
pyximport
|
||||
@ -0,0 +1,170 @@
|
||||
"""
|
||||
Compile a Python script into an executable that embeds CPython.
|
||||
Requires CPython to be built as a shared library ('libpythonX.Y').
|
||||
|
||||
Basic usage:
|
||||
|
||||
python -m Cython.Build.BuildExecutable [ARGS] somefile.py
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
DEBUG = True
|
||||
|
||||
import sys
|
||||
import os
|
||||
# Before CPython 3.8.7/3.9 the sysconfig module's values were not fully
# reliable, so fall back to the distutils equivalents behind a small shim
# that mimics the subset of the modern sysconfig API used in this module.
if sys.version_info < (3, 9):
    from distutils import sysconfig as _sysconfig

    class sysconfig(object):
        """Minimal stand-in exposing only get_path('include') and get_config_var."""

        @staticmethod
        def get_path(name):
            # only 'include' is ever requested by this module
            assert name == 'include'
            return _sysconfig.get_python_inc()

        get_config_var = staticmethod(_sysconfig.get_config_var)
else:
    # sysconfig can be trusted from cpython >= 3.8.7
    import sysconfig
|
||||
|
||||
|
||||
def get_config_var(name, default=''):
    """Look up *name* in the interpreter's build configuration.

    Falls back to *default* when the variable is missing or empty, so
    callers always receive a usable (string) value.
    """
    value = sysconfig.get_config_var(name)
    return value if value else default
|
||||
|
||||
# Paths and compiler/linker settings derived from the running interpreter's
# build configuration; several can be overridden via environment variables.
INCDIR = sysconfig.get_path('include')   # Python C header directory
LIBDIR1 = get_config_var('LIBDIR')       # primary library directory
LIBDIR2 = get_config_var('LIBPL')        # config-specific library directory
PYLIB = get_config_var('LIBRARY')        # static library name, e.g. 'libpython3.11.a'
PYLIB_DYN = get_config_var('LDLIBRARY')  # shared library name, if the build has one
if PYLIB_DYN == PYLIB:
    # no shared library
    PYLIB_DYN = ''
else:
    PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0]   # 'lib(XYZ).so' -> XYZ

CC = get_config_var('CC', os.environ.get('CC', ''))
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC))
LINKFORSHARED = get_config_var('LINKFORSHARED')
LIBS = get_config_var('LIBS')
SYSLIBS = get_config_var('SYSLIBS')
# executable suffix: '' on POSIX, '.exe' on Windows
EXE_EXT = sysconfig.get_config_var('EXE')
|
||||
|
||||
|
||||
def _debug(msg, *args):
    """Write a diagnostic line to stderr when DEBUG is enabled.

    Extra positional arguments are %-formatted into *msg*.
    """
    if not DEBUG:
        return
    text = msg % args if args else msg
    sys.stderr.write(text + '\n')
|
||||
|
||||
|
||||
def dump_config():
    """Print every build configuration value gathered above to stderr."""
    _debug('INCDIR: %s', INCDIR)
    _debug('LIBDIR1: %s', LIBDIR1)
    _debug('LIBDIR2: %s', LIBDIR2)
    _debug('PYLIB: %s', PYLIB)
    _debug('PYLIB_DYN: %s', PYLIB_DYN)
    _debug('CC: %s', CC)
    _debug('CFLAGS: %s', CFLAGS)
    _debug('LINKCC: %s', LINKCC)
    _debug('LINKFORSHARED: %s', LINKFORSHARED)
    _debug('LIBS: %s', LIBS)
    _debug('SYSLIBS: %s', SYSLIBS)
    _debug('EXE_EXT: %s', EXE_EXT)
||||
|
||||
|
||||
def _parse_args(args):
|
||||
cy_args = []
|
||||
last_arg = None
|
||||
for i, arg in enumerate(args):
|
||||
if arg.startswith('-'):
|
||||
cy_args.append(arg)
|
||||
elif last_arg in ('-X', '--directive'):
|
||||
cy_args.append(arg)
|
||||
else:
|
||||
input_file = arg
|
||||
args = args[i+1:]
|
||||
break
|
||||
last_arg = arg
|
||||
else:
|
||||
raise ValueError('no input file provided')
|
||||
|
||||
return input_file, cy_args, args
|
||||
|
||||
|
||||
def runcmd(cmd, shell=True):
    """Run *cmd* (a list of argument strings), echoing it via _debug() first.

    With shell=True the list is joined into one shell command line.
    Exits the current process with the command's return code on failure.
    """
    if shell:
        cmd = ' '.join(cmd)
        _debug(cmd)
    else:
        _debug(' '.join(cmd))

    import subprocess
    returncode = subprocess.call(cmd, shell=shell)

    if returncode:
        sys.exit(returncode)
|
||||
|
||||
|
||||
def clink(basename):
    """Link 'basename.o' into an executable that embeds the CPython runtime.

    Links against the shared libpython when one exists (PYLIB_DYN), else
    against the static library file located in LIBDIR1.
    """
    runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
           + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
           + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())
|
||||
|
||||
|
||||
def ccompile(basename):
    """Compile 'basename.c' into 'basename.o' using the configured C compiler."""
    runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())
|
||||
|
||||
|
||||
def cycompile(input_file, options=()):
    """Run Cython on *input_file* with '--embed' to emit embeddable C code.

    Exits the process with status 1 when compilation reports errors.
    """
    from ..Compiler import Version, CmdLine, Main
    options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
    _debug('Using Cython %s to compile %s', Version.version, input_file)
    result = Main.compile(sources, options)
    if result.num_errors > 0:
        sys.exit(1)
|
||||
|
||||
|
||||
def exec_file(program_name, args=()):
    """Execute the built program (resolved to an absolute path) with *args*."""
    command = [os.path.abspath(program_name)]
    command.extend(args)
    runcmd(command, shell=False)
|
||||
|
||||
|
||||
def build(input_file, compiler_args=(), force=False):
    """
    Build an executable program from a Cython module.

    Skips the rebuild when the target executable already exists and is at
    least as new as the input file, unless *force* is true.

    Returns the name of the executable file.
    """
    basename = os.path.splitext(input_file)[0]
    exe_file = basename + EXE_EXT
    # on platforms with no EXE suffix, 'prog' in would overwrite 'prog' out
    if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
        raise ValueError("Input and output file names are the same, refusing to overwrite")
    if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
            and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
        _debug("File is up to date, not regenerating %s", exe_file)
        return exe_file
    # pipeline: .py/.pyx -> .c -> .o -> executable
    cycompile(input_file, compiler_args)
    ccompile(basename)
    clink(basename)
    return exe_file
|
||||
|
||||
|
||||
def build_and_run(args):
    """
    Build an executable program from a Cython module and run it.

    Arguments after the module name will be passed verbatim to the program.
    """
    program_name, args = _build(args)
    exec_file(program_name, args)
|
||||
|
||||
|
||||
def _build(args):
    """Parse *args*, build the named module and return (program_name, rest)."""
    input_file, compiler_args, program_args = _parse_args(args)
    executable = build(input_file, compiler_args)
    return executable, program_args
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Build (but do not run) the module named on the command line.
    _build(sys.argv[1:])
|
||||
255
venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py
Normal file
255
venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py
Normal file
@ -0,0 +1,255 @@
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from .Dependencies import cythonize, extended_iglob
|
||||
from ..Utils import is_package_dir
|
||||
from ..Compiler import Options
|
||||
|
||||
try:
|
||||
import multiprocessing
|
||||
parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
|
||||
except ImportError:
|
||||
multiprocessing = None
|
||||
parallel_compiles = 0
|
||||
|
||||
|
||||
class _FakePool(object):
|
||||
def map_async(self, func, args):
|
||||
try:
|
||||
from itertools import imap
|
||||
except ImportError:
|
||||
imap=map
|
||||
for _ in imap(func, args):
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def terminate(self):
|
||||
pass
|
||||
|
||||
def join(self):
|
||||
pass
|
||||
|
||||
|
||||
def find_package_base(path):
    """Split *path* into (base_dir, package_path).

    Walks upwards while the parent directory is still a package (per
    is_package_dir), so base_dir becomes the first non-package ancestor
    and package_path the slash-joined relative path below it.
    """
    base_dir, package_path = os.path.split(path)
    while is_package_dir(base_dir):
        base_dir, parent = os.path.split(base_dir)
        package_path = '%s/%s' % (parent, package_path)
    return base_dir, package_path
|
||||
|
||||
def cython_compile(path_pattern, options):
    """Expand *path_pattern* and compile every matching file or package dir."""
    absolute_paths = (os.path.abspath(match) for match in extended_iglob(path_pattern))
    _cython_compile_files(absolute_paths, options)
|
||||
|
||||
def _cython_compile_files(all_paths, options):
    """Cythonize *all_paths* and, when requested, build the extension modules.

    Builds are dispatched through distutils/setuptools; with
    options.parallel > 1 they run in a multiprocessing pool that is reused
    across paths and torn down (or terminated on error) at the end.
    """
    pool = None
    try:
        for path in all_paths:
            if options.build_inplace:
                # find the base dir to build from: first existing ancestor
                # directory that is not itself a package
                base_dir = path
                while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                    base_dir = os.path.dirname(base_dir)
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.{py,pyx}')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            ext_modules = cythonize(
                paths,
                nthreads=options.parallel,
                exclude_failures=options.keep_going,
                exclude=options.excludes,
                compiler_directives=options.directives,
                compile_time_env=options.compile_time_env,
                force=options.force,
                quiet=options.quiet,
                depfile=options.depfile,
                language=options.language,
                **options.options)

            if ext_modules and options.build:
                if len(ext_modules) > 1 and options.parallel > 1:
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            # pool creation can fail (e.g. no /dev/shm);
                            # fall back to serial builds
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        # stop still-running workers before propagating the error
        if pool is not None:
            pool.terminate()
        raise
    else:
        if pool is not None:
            pool.close()
            pool.join()
|
||||
|
||||
|
||||
def run_distutils(args):
    """Build extension modules in place via distutils/setuptools.

    *args* is a (base_dir, ext_modules) tuple; when base_dir is given the
    build runs from that directory using a temporary build dir that is
    removed afterwards, and the original cwd is restored.
    """
    try:
        from distutils.core import setup
    except ImportError:
        # distutils was removed in Python 3.12; setuptools provides a shim
        try:
            from setuptools import setup
        except ImportError:
            raise ImportError("'distutils' is not available. Please install 'setuptools' for binary builds.")

    base_dir, ext_modules = args
    script_args = ['build_ext', '-i']
    cwd = os.getcwd()
    temp_dir = None
    try:
        if base_dir:
            os.chdir(base_dir)
            temp_dir = tempfile.mkdtemp(dir=base_dir)
            script_args.extend(['--build-temp', temp_dir])
        setup(
            script_name='setup.py',
            script_args=script_args,
            ext_modules=ext_modules,
        )
    finally:
        if base_dir:
            # restore the working directory and clean up the temp build dir
            os.chdir(cwd)
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
def create_args_parser():
    """Create the argparse parser for the 'cythonize' command-line tool.

    Returns an ArgumentParser whose custom actions (for -X/-E/-s) parse
    'NAME=VALUE' settings into dicts.
    """
    from argparse import ArgumentParser, RawDescriptionHelpFormatter
    from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction

    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        epilog="""\
Environment variables:
  CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
      of modification times and changes.
  Environment variables accepted by setuptools are supported to configure the C compiler and build:
  https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
    )

    parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
                        dest='directives', default={}, type=str,
                        action=ParseDirectivesAction,
                        help='set a compiler directive')
    parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                        dest='compile_time_env', default={}, type=str,
                        action=ParseCompileTimeEnvAction,
                        help='set a compile time environment variable')
    parser.add_argument('-s', '--option', metavar='NAME=VALUE',
                        dest='options', default={}, type=str,
                        action=ParseOptionsAction,
                        help='set a cythonize option')
    parser.add_argument('-2', dest='language_level', action='store_const', const=2, default=None,
                        help='use Python 2 syntax mode by default')
    parser.add_argument('-3', dest='language_level', action='store_const', const=3,
                        help='use Python 3 syntax mode by default')
    parser.add_argument('--3str', dest='language_level', action='store_const', const='3str',
                        help='use Python 3 syntax mode by default')
    parser.add_argument('-+', '--cplus', dest='language', action='store_const', const='c++', default=None,
                        help='Compile as C++ rather than C')
    parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
                        help='Produce a colorized HTML version of the source.')
    parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
                        help='Produce a colorized HTML version of the source '
                             'which includes entire generated C/C++-code.')
    parser.add_argument('-x', '--exclude', metavar='PATTERN', dest='excludes',
                        action='append', default=[],
                        help='exclude certain file patterns from the compilation')

    parser.add_argument('-b', '--build', dest='build', action='store_true', default=None,
                        help='build extension modules using distutils/setuptools')
    parser.add_argument('-i', '--inplace', dest='build_inplace', action='store_true', default=None,
                        help='build extension modules in place using distutils/setuptools (implies -b)')
    # BUG FIX: '%' binds tighter than 'or', so the old expression
    # '... % parallel_compiles or 1' formatted first and never applied the
    # intended fallback of 1 when parallel_compiles == 0.  Parenthesize so
    # the help text shows 1 in that case.
    parser.add_argument('-j', '--parallel', dest='parallel', metavar='N',
                        type=int, default=parallel_compiles,
                        help=('run builds in N parallel jobs (default: %d)' %
                              (parallel_compiles or 1)))
    parser.add_argument('-f', '--force', dest='force', action='store_true', default=None,
                        help='force recompilation')
    parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', default=None,
                        help='be less verbose during compilation')

    parser.add_argument('--lenient', dest='lenient', action='store_true', default=None,
                        help='increase Python compatibility by ignoring some compile time errors')
    parser.add_argument('-k', '--keep-going', dest='keep_going', action='store_true', default=None,
                        help='compile as much as possible, ignore compilation failures')
    parser.add_argument('--no-docstrings', dest='no_docstrings', action='store_true', default=None,
                        help='strip docstrings')
    parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
    parser.add_argument('sources', nargs='*')
    return parser
|
||||
|
||||
|
||||
def parse_args_raw(parser, args):
    """Parse *args* with *parser*, tolerating interspersed positionals.

    argparse stops collecting the 'sources' positional at the first
    option, so source files that landed among the unknown leftovers are
    re-collected here; any genuinely unknown option is reported as an
    error.  Returns an (options, sources) pair with options.sources
    removed from the namespace.
    """
    options, leftovers = parser.parse_known_args(args)
    sources = options.sources
    for token in leftovers:
        if token.startswith('-'):
            parser.error("unknown option " + token)
        sources.append(token)
    del options.sources
    return (options, sources)
|
||||
|
||||
|
||||
def parse_args(args):
    """Parse the cythonize command line and apply global option side effects.

    Returns (options, source_paths).  Also mutates the global
    Cython.Compiler.Options module for --lenient, --annotate and
    --no-docstrings, and disables parallelism when multiprocessing is
    unavailable.
    """
    parser = create_args_parser()
    options, args = parse_args_raw(parser, args)

    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        # in-place builds imply building
        options.build = True
    if multiprocessing is None:
        options.parallel = 0
    if options.language_level:
        assert options.language_level in (2, 3, '3str')
        options.options['language_level'] = options.language_level

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    if options.annotate:
        Options.annotate = options.annotate

    if options.no_docstrings:
        Options.docstrings = False

    return options, args
|
||||
|
||||
|
||||
def main(args=None):
    """Entry point for the 'cythonize' script.

    Expands each source pattern into absolute paths and compiles them;
    exits with status 1 when a pattern matches nothing.
    """
    options, paths = parse_args(args)

    all_paths = []
    for path in paths:
        expanded_path = [os.path.abspath(p) for p in extended_iglob(path)]
        if not expanded_path:
            import sys
            print("{}: No such file or directory: '{}'".format(sys.argv[0], path), file=sys.stderr)
            sys.exit(1)
        all_paths.extend(expanded_path)
    _cython_compile_files(all_paths, options)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # allow running this module directly as the 'cythonize' tool
    main()
|
||||
1380
venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py
Normal file
1380
venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1 @@
|
||||
from Cython.Distutils.build_ext import build_ext
|
||||
372
venv/lib/python3.11/site-packages/Cython/Build/Inline.py
Normal file
372
venv/lib/python3.11/site-packages/Cython/Build/Inline.py
Normal file
@ -0,0 +1,372 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from distutils.core import Distribution, Extension
|
||||
from distutils.command.build_ext import build_ext
|
||||
|
||||
import Cython
|
||||
from ..Compiler.Main import Context
|
||||
from ..Compiler.Options import (default_options, CompilationOptions,
|
||||
get_directive_defaults)
|
||||
|
||||
from ..Compiler.Visitor import CythonTransform, EnvTransform
|
||||
from ..Compiler.ParseTreeTransforms import SkipDeclarations
|
||||
from ..Compiler.TreeFragment import parse_from_strings
|
||||
from ..Compiler.StringEncoding import _unicode
|
||||
from .Dependencies import strip_string_literals, cythonize, cached_function
|
||||
from ..Compiler import Pipeline
|
||||
from ..Utils import get_cython_cache_dir
|
||||
import cython as cython_module
|
||||
|
||||
|
||||
# True when running on Python 3 (the only supported path on modern CPython).
IS_PY3 = sys.version_info >= (3,)

# A utility function to convert user-supplied ASCII strings to unicode.
if not IS_PY3:
    def to_unicode(s):
        # Python 2: decode byte strings, pass unicode through unchanged
        if isinstance(s, bytes):
            return s.decode('ascii')
        else:
            return s
else:
    # Python 3: str is already unicode, so this is the identity function
    to_unicode = lambda x: x
|
||||
|
||||
|
||||
# Loader for compiled extension modules addressed by explicit file path.
if sys.version_info < (3, 5):
    import imp

    def load_dynamic(name, module_path):
        # legacy 'imp' API (removed in modern CPython)
        return imp.load_dynamic(name, module_path)
else:
    import importlib.util
    from importlib.machinery import ExtensionFileLoader

    def load_dynamic(name, path):
        """Load and return the extension module *name* from file *path*."""
        spec = importlib.util.spec_from_file_location(name, loader=ExtensionFileLoader(name, path))
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
|
||||
|
||||
|
||||
class UnboundSymbols(EnvTransform, SkipDeclarations):
    """Tree visitor that collects names not resolvable in any enclosing scope.

    Calling an instance on a parse tree returns the set of unbound names.
    """
    def __init__(self):
        # NOTE: deliberately skips EnvTransform.__init__ by starting the
        # super() chain *after* EnvTransform, passing an empty context
        super(EnvTransform, self).__init__(context=None)
        self.unbound = set()
    def visit_NameNode(self, node):
        # any name the current environment cannot look up is unbound
        if not self.current_env().lookup(node.name):
            self.unbound.add(node.name)
        return node
    def __call__(self, node):
        super(UnboundSymbols, self).__call__(node)
        return self.unbound
|
||||
|
||||
|
||||
@cached_function
def unbound_symbols(code, context=None):
    """Return the names used in *code* that are neither bound there nor builtins.

    Runs the Cython compilation pipeline up to (and including) declaration
    analysis so that scopes are populated before scanning for unresolved
    names.  Results are cached by the decorator.
    """
    code = to_unicode(code)
    if context is None:
        context = Context([], get_directive_defaults(),
                          options=CompilationOptions(default_options))
    from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is None:
            continue
        tree = phase(tree)
        if isinstance(phase, AnalyseDeclarationsTransform):
            # declarations are known by now; later phases are unnecessary
            break
    try:
        import builtins
    except ImportError:
        import __builtin__ as builtins  # Python 2
    return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
|
||||
|
||||
|
||||
def unsafe_type(arg, context=None):
    """Infer a Cython type name for *arg*, mapping Python ints to C 'long'.

    "Unsafe" because a C long can overflow where a Python int cannot;
    every other value defers to safe_type().
    """
    if type(arg) is int:
        return 'long'
    return safe_type(arg, context)
|
||||
|
||||
|
||||
def safe_type(arg, context=None):
    """Return a Cython type declaration string that can safely hold *arg*.

    Builtin containers map to their own names, numeric scalars to C-level
    equivalents, numpy arrays to a typed ndarray declaration, and known
    extension types (resolved through *context*) to their qualified name;
    anything else degrades to 'object'.
    """
    kind = type(arg)
    if kind in (list, tuple, dict, str):
        return kind.__name__
    if kind is complex:
        return 'double complex'
    if kind is float:
        return 'double'
    if kind is bool:
        return 'bint'
    if 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
        return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
    # walk the MRO looking for a base type declared in a known module
    for base_type in kind.__mro__:
        if base_type.__module__ in ('__builtin__', 'builtins'):
            return 'object'
        module = context.find_module(base_type.__module__, need_pxd=False)
        if module:
            entry = module.lookup(base_type.__name__)
            if entry.is_type:
                return '%s.%s' % (base_type.__module__, base_type.__name__)
    return 'object'
|
||||
|
||||
|
||||
def _get_build_extension():
    """Create a finalized distutils 'build_ext' command for this platform."""
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing
    # the configuration files
    config_files = dist.find_config_files()
    dist.parse_config_files(config_files)
    build_extension = build_ext(dist)
    build_extension.finalize_options()
    return build_extension
|
||||
|
||||
|
||||
@cached_function
def _create_context(cython_include_dirs):
    """Build a (cached) compiler Context for the given include directories.

    *cython_include_dirs* must be hashable (e.g. a tuple) so the caching
    decorator can key on it.
    """
    return Context(
        list(cython_include_dirs),
        get_directive_defaults(),
        options=CompilationOptions(default_options)
    )
|
||||
|
||||
|
||||
# cache of parsed snippets and compiled invoker functions for cython_inline()
_cython_inline_cache = {}
# default compilation context that searches the current directory
_cython_inline_default_context = _create_context(('.',))
|
||||
|
||||
|
||||
def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
    """Fill *kwds* with values for *unbound_symbols* from the caller's scope.

    When locals/globals are not supplied they are taken from the frame
    three levels up (the user code that invoked cython_inline()).  Symbols
    found nowhere are reported on stdout and left out of *kwds*.
    """
    for symbol in unbound_symbols:
        if symbol not in kwds:
            if locals is None or globals is None:
                # frame of the original caller of cython_inline()
                calling_frame = inspect.currentframe().f_back.f_back.f_back
                if locals is None:
                    locals = calling_frame.f_locals
                if globals is None:
                    globals = calling_frame.f_globals
            if not isinstance(locals, dict):
                # FrameLocalsProxy is stricter than dict on how it looks up keys
                # and this means our "EncodedStrings" don't match the keys in locals.
                # Therefore copy to a dict.
                locals = dict(locals)
            if symbol in locals:
                kwds[symbol] = locals[symbol]
            elif symbol in globals:
                kwds[symbol] = globals[symbol]
            else:
                print("Couldn't find %r" % symbol)
|
||||
|
||||
|
||||
def _inline_key(orig_code, arg_sigs, language_level):
    """Hash everything that affects the generated module into a cache key.

    Includes the interpreter version/executable and the Cython version so
    cached builds are never shared across incompatible environments.
    """
    key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__
    return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
def cython_inline(code, get_type=unsafe_type,
|
||||
lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
|
||||
cython_include_dirs=None, cython_compiler_directives=None,
|
||||
force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
|
||||
|
||||
if get_type is None:
|
||||
get_type = lambda x: 'object'
|
||||
ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context
|
||||
|
||||
cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
|
||||
if language_level is None and 'language_level' not in cython_compiler_directives:
|
||||
language_level = '3str'
|
||||
if language_level is not None:
|
||||
cython_compiler_directives['language_level'] = language_level
|
||||
|
||||
key_hash = None
|
||||
|
||||
# Fast path if this has been called in this session.
|
||||
_unbound_symbols = _cython_inline_cache.get(code)
|
||||
if _unbound_symbols is not None:
|
||||
_populate_unbound(kwds, _unbound_symbols, locals, globals)
|
||||
args = sorted(kwds.items())
|
||||
arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
|
||||
key_hash = _inline_key(code, arg_sigs, language_level)
|
||||
invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
|
||||
if invoke is not None:
|
||||
arg_list = [arg[1] for arg in args]
|
||||
return invoke(*arg_list)
|
||||
|
||||
orig_code = code
|
||||
code = to_unicode(code)
|
||||
code, literals = strip_string_literals(code)
|
||||
code = strip_common_indent(code)
|
||||
if locals is None:
|
||||
locals = inspect.currentframe().f_back.f_back.f_locals
|
||||
if globals is None:
|
||||
globals = inspect.currentframe().f_back.f_back.f_globals
|
||||
try:
|
||||
_cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
|
||||
_populate_unbound(kwds, _unbound_symbols, locals, globals)
|
||||
except AssertionError:
|
||||
if not quiet:
|
||||
# Parsing from strings not fully supported (e.g. cimports).
|
||||
print("Could not parse code as a string (to extract unbound symbols).")
|
||||
|
||||
cimports = []
|
||||
for name, arg in list(kwds.items()):
|
||||
if arg is cython_module:
|
||||
cimports.append('\ncimport cython as %s' % name)
|
||||
del kwds[name]
|
||||
arg_names = sorted(kwds)
|
||||
arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
|
||||
if key_hash is None:
|
||||
key_hash = _inline_key(orig_code, arg_sigs, language_level)
|
||||
module_name = "_cython_inline_" + key_hash
|
||||
|
||||
if module_name in sys.modules:
|
||||
module = sys.modules[module_name]
|
||||
|
||||
else:
|
||||
build_extension = None
|
||||
if cython_inline.so_ext is None:
|
||||
# Figure out and cache current extension suffix
|
||||
build_extension = _get_build_extension()
|
||||
cython_inline.so_ext = build_extension.get_ext_filename('')
|
||||
|
||||
lib_dir = os.path.abspath(lib_dir)
|
||||
module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)
|
||||
|
||||
if not os.path.exists(lib_dir):
|
||||
os.makedirs(lib_dir)
|
||||
if force or not os.path.isfile(module_path):
|
||||
cflags = []
|
||||
define_macros = []
|
||||
c_include_dirs = []
|
||||
qualified = re.compile(r'([.\w]+)[.]')
|
||||
for type, _ in arg_sigs:
|
||||
m = qualified.match(type)
|
||||
if m:
|
||||
cimports.append('\ncimport %s' % m.groups()[0])
|
||||
# one special case
|
||||
if m.groups()[0] == 'numpy':
|
||||
import numpy
|
||||
c_include_dirs.append(numpy.get_include())
|
||||
define_macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
|
||||
# cflags.append('-Wno-unused')
|
||||
module_body, func_body = extract_func_code(code)
|
||||
params = ', '.join(['%s %s' % a for a in arg_sigs])
|
||||
module_code = """
|
||||
%(module_body)s
|
||||
%(cimports)s
|
||||
def __invoke(%(params)s):
|
||||
%(func_body)s
|
||||
return locals()
|
||||
""" % {'cimports': '\n'.join(cimports),
|
||||
'module_body': module_body,
|
||||
'params': params,
|
||||
'func_body': func_body }
|
||||
for key, value in literals.items():
|
||||
module_code = module_code.replace(key, value)
|
||||
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
|
||||
fh = open(pyx_file, 'w')
|
||||
try:
|
||||
fh.write(module_code)
|
||||
finally:
|
||||
fh.close()
|
||||
extension = Extension(
|
||||
name=module_name,
|
||||
sources=[pyx_file],
|
||||
include_dirs=c_include_dirs or None,
|
||||
extra_compile_args=cflags or None,
|
||||
define_macros=define_macros or None,
|
||||
)
|
||||
if build_extension is None:
|
||||
build_extension = _get_build_extension()
|
||||
build_extension.extensions = cythonize(
|
||||
[extension],
|
||||
include_path=cython_include_dirs or ['.'],
|
||||
compiler_directives=cython_compiler_directives,
|
||||
quiet=quiet)
|
||||
build_extension.build_temp = os.path.dirname(pyx_file)
|
||||
build_extension.build_lib = lib_dir
|
||||
build_extension.run()
|
||||
|
||||
if sys.platform == 'win32' and sys.version_info >= (3, 8):
|
||||
with os.add_dll_directory(os.path.abspath(lib_dir)):
|
||||
module = load_dynamic(module_name, module_path)
|
||||
else:
|
||||
module = load_dynamic(module_name, module_path)
|
||||
|
||||
_cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke
|
||||
arg_list = [kwds[arg] for arg in arg_names]
|
||||
return module.__invoke(*arg_list)
|
||||
|
||||
|
||||
# Cached suffix used by cython_inline above. None should get
# overridden with actual value upon the first cython_inline invocation
# (see the `cython_inline.so_ext is None` fast-path check inside cython_inline).
cython_inline.so_ext = None
|
||||
|
||||
# Finds the first non-space character of a line (None for all-blank lines).
_find_non_space = re.compile('[^ ]').search


def strip_common_indent(code):
    """Remove the smallest common leading indentation from all code lines.

    Blank lines and comment-only lines are ignored when computing the
    common indentation (and comment lines are left unchanged), so that an
    arbitrarily indented code snippet becomes valid top-level code.
    """
    min_indent = None
    lines = code.splitlines()
    for line in lines:
        match = _find_non_space(line)
        if not match:
            continue  # blank line
        indent = match.start()
        if line[indent] == '#':
            continue  # comment line does not count towards the indent
        if min_indent is None or min_indent > indent:
            min_indent = indent
    if min_indent is None:
        # BUG FIX: only blank/comment lines -> nothing to strip.  The
        # original fell through and sliced with min_indent=None (TypeError),
        # or read an unbound 'indent' for comment-only input.
        return '\n'.join(lines)
    for ix, line in enumerate(lines):
        match = _find_non_space(line)
        # BUG FIX: the original tested line[indent:indent+1] == '#', using
        # the stale 'indent' left over from the first loop; test this
        # line's own first non-space character instead.
        if not match or line[match.start()] == '#':
            continue
        lines[ix] = line[min_indent:]
    return '\n'.join(lines)
|
||||
|
||||
|
||||
# Top-level statements that must stay at module scope rather than being
# moved into the generated function body (cimports, extern/class cdefs,
# star imports).
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')


def extract_func_code(code):
    """Split *code* into a (module-level code, indented function body) pair.

    Non-indented lines matching ``module_statement`` switch collection to
    the module section; any other non-indented line switches back to the
    function section.  Indented lines follow whichever section is current,
    so continuation lines stay with their opening statement.
    """
    module_lines = []
    body_lines = []
    target = body_lines
    for raw_line in code.replace('\t', ' ').split('\n'):
        if not raw_line.startswith(' '):
            # A new top-level statement decides which bucket it (and its
            # indented continuation lines) belongs to.
            target = module_lines if module_statement.match(raw_line) else body_lines
        target.append(raw_line)
    return '\n'.join(module_lines), ' ' + '\n '.join(body_lines)
|
||||
|
||||
|
||||
def get_body(source):
    """Return the executable body of a function or lambda source string.

    For a ``lambda``, the expression after the colon is wrapped in a
    ``return`` statement; for a ``def``, the text after the signature's
    colon is returned unchanged.
    """
    ix = source.index(':')
    # BUG FIX: the original compared source[:5] (at most 'lambd') against
    # the 6-character string 'lambda', which could never match, so lambda
    # bodies were never wrapped in a return statement.
    if source[:6] == 'lambda':
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]
|
||||
|
||||
|
||||
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
    """Wraps a plain Python function so that each call compiles and runs
    its body through cython_inline with the call's actual arguments."""

    def __init__(self, f):
        # Keep the original function and extract its source body once.
        self._f = f
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        # Bind *args/**kwds to the function's parameter names so they can
        # be forwarded to cython_inline as keyword arguments.
        all = inspect.getcallargs(self._f, *args, **kwds)
        if IS_PY3:
            return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
        else:
            # Python 2 spelling of the function's global namespace.
            return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
|
||||
572
venv/lib/python3.11/site-packages/Cython/Build/IpythonMagic.py
Normal file
572
venv/lib/python3.11/site-packages/Cython/Build/IpythonMagic.py
Normal file
@ -0,0 +1,572 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
=====================
|
||||
Cython related magics
|
||||
=====================
|
||||
|
||||
Magic command interface for interactive work with Cython
|
||||
|
||||
.. note::
|
||||
|
||||
The ``Cython`` package needs to be installed separately. It
|
||||
can be obtained using ``easy_install`` or ``pip``.
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
To enable the magics below, execute ``%load_ext cython``.
|
||||
|
||||
``%%cython``
|
||||
|
||||
{CYTHON_DOC}
|
||||
|
||||
``%%cython_inline``
|
||||
|
||||
{CYTHON_INLINE_DOC}
|
||||
|
||||
``%%cython_pyximport``
|
||||
|
||||
{CYTHON_PYXIMPORT_DOC}
|
||||
|
||||
Author:
|
||||
* Brian Granger
|
||||
|
||||
Code moved from IPython and adapted by:
|
||||
* Martín Gaitán
|
||||
|
||||
Parts of this code were taken from Cython.inline.
|
||||
"""
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2010-2011, IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file ipython-COPYING.rst, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import copy
|
||||
import distutils.log
|
||||
import textwrap
|
||||
|
||||
IO_ENCODING = sys.getfilesystemencoding()
|
||||
IS_PY2 = sys.version_info[0] < 3
|
||||
|
||||
import hashlib
|
||||
from distutils.core import Distribution, Extension
|
||||
from distutils.command.build_ext import build_ext
|
||||
|
||||
from IPython.core import display
|
||||
from IPython.core import magic_arguments
|
||||
from IPython.core.magic import Magics, magics_class, cell_magic
|
||||
try:
|
||||
from IPython.paths import get_ipython_cache_dir
|
||||
except ImportError:
|
||||
# older IPython version
|
||||
from IPython.utils.path import get_ipython_cache_dir
|
||||
from IPython.utils.text import dedent
|
||||
|
||||
from ..Shadow import __version__ as cython_version
|
||||
from ..Compiler.Errors import CompileError
|
||||
from .Inline import cython_inline, load_dynamic
|
||||
from .Dependencies import cythonize
|
||||
from ..Utils import captured_fd, print_captured
|
||||
|
||||
|
||||
# Per-compiler flag sets for profile guided optimisation (PGO):
# 'gen' flags instrument the build to collect a runtime profile,
# 'use' flags consume the collected profile in the final optimised build.
PGO_CONFIG = {
    'gcc': {
        'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
        'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
    },
    # blind copy from 'configure' script in CPython 3.7
    'icc': {
        'gen': ['-prof-gen'],
        'use': ['-prof-use'],
    }
}
# mingw32 is gcc-based, so it shares the gcc flag set.
PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
|
||||
|
||||
|
||||
if IS_PY2:
    def encode_fs(name):
        """Encode a path for distutils on Python 2 (expects byte strings)."""
        return name.encode(IO_ENCODING) if not isinstance(name, bytes) else name
else:
    def encode_fs(name):
        """Python 3 handles str paths natively; pass through unchanged."""
        return name
|
||||
|
||||
|
||||
@magics_class
|
||||
class CythonMagics(Magics):
|
||||
|
||||
    def __init__(self, shell):
        """Set up the per-session caches used by the magics."""
        super(CythonMagics, self).__init__(shell)
        # pyximport modules already imported once, kept for "reload" handling.
        self._reloads = {}
        # Maps compilation key -> generated module name.
        self._code_cache = {}
        # pyximport.install() is only performed once per session.
        self._pyximport_installed = False
|
||||
|
||||
def _import_all(self, module):
|
||||
mdict = module.__dict__
|
||||
if '__all__' in mdict:
|
||||
keys = mdict['__all__']
|
||||
else:
|
||||
keys = [k for k in mdict if not k.startswith('_')]
|
||||
|
||||
for k in keys:
|
||||
try:
|
||||
self.shell.push({k: mdict[k]})
|
||||
except KeyError:
|
||||
msg = "'module' object has no attribute '%s'" % k
|
||||
raise AttributeError(msg)
|
||||
|
||||
    @cell_magic
    def cython_inline(self, line, cell):
        """Compile and run a Cython code cell using Cython.inline.

        This magic simply passes the body of the cell to Cython.inline
        and returns the result. If the variables `a` and `b` are defined
        in the user's namespace, here is a simple example that returns
        their sum::

            %%cython_inline
            return a+b

        For most purposes, we recommend the usage of the `%%cython` magic.
        """
        # NOTE(review): 'locs' is taken from user_global_ns and 'globs' from
        # user_ns, which looks swapped relative to the variable names --
        # confirm against Cython.inline's expectations before changing.
        locs = self.shell.user_global_ns
        globs = self.shell.user_ns
        return cython_inline(cell, locals=locs, globals=globs)
|
||||
|
||||
    @cell_magic
    def cython_pyximport(self, line, cell):
        """Compile and import a Cython code cell using pyximport.

        The contents of the cell are written to a `.pyx` file in the current
        working directory, which is then imported using `pyximport`. This
        magic requires a module name to be passed::

            %%cython_pyximport modulename
            def f(x):
                return 2.0*x

        The compiled module is then imported and all of its symbols are
        injected into the user's namespace. For most purposes, we recommend
        the usage of the `%%cython` magic.
        """
        module_name = line.strip()
        if not module_name:
            raise ValueError('module name must be given')
        fname = module_name + '.pyx'
        # Write the cell source next to the current working directory.
        with io.open(fname, 'w', encoding='utf-8') as f:
            f.write(cell)
        # Install pyximport lazily, at most once per session.
        if 'pyximport' not in sys.modules or not self._pyximport_installed:
            import pyximport
            pyximport.install()
            self._pyximport_installed = True
        if module_name in self._reloads:
            module = self._reloads[module_name]
            # Note: reloading extension modules is not actually supported
            # (requires PEP-489 reinitialisation support).
            # Don't know why this should ever have worked as it reads here.
            # All we really need to do is to update the globals below.
            #reload(module)
        else:
            __import__(module_name)
            module = sys.modules[module_name]
            self._reloads[module_name] = module
        # Push the module's public names into the user namespace.
        self._import_all(module)
|
||||
|
||||
    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        '-a', '--annotate', action='store_const', const='default', dest='annotate',
        help="Produce a colorized HTML version of the source."
    )
    @magic_arguments.argument(
        '--annotate-fullc', action='store_const', const='fullc', dest='annotate',
        help="Produce a colorized HTML version of the source "
             "which includes entire generated C/C++-code."
    )
    @magic_arguments.argument(
        '-+', '--cplus', action='store_true', default=False,
        help="Output a C++ rather than C file."
    )
    @magic_arguments.argument(
        '-3', dest='language_level', action='store_const', const=3, default=None,
        help="Select Python 3 syntax."
    )
    @magic_arguments.argument(
        '-2', dest='language_level', action='store_const', const=2, default=None,
        help="Select Python 2 syntax."
    )
    @magic_arguments.argument(
        '-f', '--force', action='store_true', default=False,
        help="Force the compilation of a new module, even if the source has been "
             "previously compiled."
    )
    @magic_arguments.argument(
        '-c', '--compile-args', action='append', default=[],
        help="Extra flags to pass to compiler via the `extra_compile_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '--link-args', action='append', default=[],
        help="Extra flags to pass to linker via the `extra_link_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '-l', '--lib', action='append', default=[],
        help="Add a library to link the extension against (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-n', '--name',
        help="Specify a name for the Cython module."
    )
    @magic_arguments.argument(
        '-L', dest='library_dirs', metavar='dir', action='append', default=[],
        help="Add a path to the list of library directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-I', '--include', action='append', default=[],
        help="Add a path to the list of include directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-S', '--src', action='append', default=[],
        help="Add a path to the list of src files (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '--pgo', dest='pgo', action='store_true', default=False,
        help=("Enable profile guided optimisation in the C compiler. "
              "Compiles the cell twice and executes it in between to generate a runtime profile.")
    )
    @magic_arguments.argument(
        '--verbose', dest='quiet', action='store_false', default=True,
        help=("Print debug information like generated .c/.cpp file location "
              "and exact gcc/g++ command invoked.")
    )
    @cell_magic
    def cython(self, line, cell):
        """Compile and import everything from a Cython code cell.

        The contents of the cell are written to a `.pyx` file in the
        directory `IPYTHONDIR/cython` using a filename with the hash of the
        code. This file is then cythonized and compiled. The resulting module
        is imported and all of its symbols are injected into the user's
        namespace. The usage is similar to that of `%%cython_pyximport` but
        you don't have to pass a module name::

            %%cython
            def f(x):
                return 2.0*x

        To compile OpenMP codes, pass the required `--compile-args`
        and `--link-args`. For example with gcc::

            %%cython --compile-args=-fopenmp --link-args=-fopenmp
            ...

        To enable profile guided optimisation, pass the ``--pgo`` option.
        Note that the cell itself needs to take care of establishing a suitable
        profile when executed. This can be done by implementing the functions to
        optimise, and then calling them directly in the same cell on some realistic
        training data like this::

            %%cython --pgo
            def critical_function(data):
                for item in data:
                    ...

            # execute function several times to build profile
            from somewhere import some_typical_data
            for _ in range(100):
                critical_function(some_typical_data)

        In Python 3.5 and later, you can distinguish between the profile and
        non-profile runs as follows::

            if "_pgo_" in __name__:
                ... # execute critical code here
        """
        args = magic_arguments.parse_argstring(self.cython, line)
        code = cell if cell.endswith('\n') else cell + '\n'
        lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
        # Everything that influences the generated module goes into the key.
        key = (code, line, sys.version_info, sys.executable, cython_version)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)

        if args.pgo:
            key += ('pgo',)
        if args.force:
            # Force a new module name by adding the current time to the
            # key which is hashed to determine the module name.
            key += (time.time(),)

        if args.name:
            module_name = str(args.name)  # no-op in Py3
        else:
            # Hash the key so identical cells reuse the same module.
            module_name = "_cython_magic_" + hashlib.sha1(str(key).encode('utf-8')).hexdigest()
        html_file = os.path.join(lib_dir, module_name + '.html')
        module_path = os.path.join(lib_dir, module_name + self.so_ext)

        have_module = os.path.isfile(module_path)
        need_cythonize = args.pgo or not have_module

        if args.annotate:
            # An existing module without its annotation still needs a rebuild.
            if not os.path.isfile(html_file):
                need_cythonize = True

        extension = None
        if need_cythonize:
            extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
            if extensions is None:
                # Compilation failed and printed error message
                return None
            assert len(extensions) == 1
            extension = extensions[0]
            self._code_cache[key] = module_name

        if args.pgo:
            # First build+run collects the profile; the final build below
            # then consumes it ('use' step).
            self._profile_pgo_wrapper(extension, lib_dir)

        def print_compiler_output(stdout, stderr, where):
            # On windows, errors are printed to stdout, we redirect both to sys.stderr.
            print_captured(stdout, where, u"Content of stdout:\n")
            print_captured(stderr, where, u"Content of stderr:\n")

        get_stderr = get_stdout = None
        try:
            # Capture the C compiler's fd-level output so it can be replayed.
            with captured_fd(1) as get_stdout:
                with captured_fd(2) as get_stderr:
                    self._build_extension(
                        extension, lib_dir, pgo_step_name='use' if args.pgo else None, quiet=args.quiet)
        except (distutils.errors.CompileError, distutils.errors.LinkError):
            # Build failed, print error message from compiler/linker
            print_compiler_output(get_stdout(), get_stderr(), sys.stderr)
            return None

        # Build seems ok, but we might still want to show any warnings that occurred
        print_compiler_output(get_stdout(), get_stderr(), sys.stdout)

        module = load_dynamic(module_name, module_path)
        self._import_all(module)

        if args.annotate:
            try:
                with io.open(html_file, encoding='utf-8') as f:
                    annotated_html = f.read()
            except IOError as e:
                # File could not be opened. Most likely the user has a version
                # of Cython before 0.15.1 (when `cythonize` learned the
                # `force` keyword argument) and has already compiled this
                # exact source without annotation.
                print('Cython completed successfully but the annotated '
                      'source could not be read.', file=sys.stderr)
                print(e, file=sys.stderr)
            else:
                return display.HTML(self.clean_annotated_html(annotated_html))
|
||||
|
||||
    def _profile_pgo_wrapper(self, extension, lib_dir):
        """
        Generate a .c file for a separate extension module that calls the
        module init function of the original module. This makes sure that the
        PGO profiler sees the correct .o file of the final module, but it still
        allows us to import the module under a different name for profiling,
        before recompiling it into the PGO optimised module. Overwriting and
        reimporting the same shared library is not portable.
        """
        extension = copy.copy(extension)  # shallow copy, do not modify sources in place!
        module_name = extension.name
        pgo_module_name = '_pgo_' + module_name
        pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
        with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
            # Wrapper module: its init function just delegates to the real
            # module's init, then aliases it in sys.modules (Py2) or simply
            # forwards the module object (Py3).
            f.write(textwrap.dedent(u"""
            #include "Python.h"
            #if PY_MAJOR_VERSION < 3
            extern PyMODINIT_FUNC init%(module_name)s(void);
            PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC init%(pgo_module_name)s(void) {
                PyObject *sys_modules;
                init%(module_name)s(); if (PyErr_Occurred()) return;
                sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */
                if (sys_modules) {
                    PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return;
                    PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
                    Py_DECREF(module);
                }
            }
            #else
            extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
                return PyInit_%(module_name)s();
            }
            #endif
            """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))

        extension.sources = extension.sources + [pgo_wrapper_c_file]  # do not modify in place!
        extension.name = pgo_module_name

        # Instrumented build ('gen' step) ...
        self._build_extension(extension, lib_dir, pgo_step_name='gen')

        # import and execute module code to generate profile
        so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
        load_dynamic(pgo_module_name, so_module_path)
|
||||
|
||||
    def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
        """Write the cell code to a .pyx file and cythonize it.

        Returns the list of distutils Extensions produced by cythonize(),
        or None if compilation failed (the error was already printed).
        """
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        pyx_file = encode_fs(pyx_file)

        c_include_dirs = args.include
        c_src_files = list(map(str, args.src))
        if 'numpy' in code:
            # Crude heuristic: any mention of numpy in the cell pulls in
            # the numpy headers.
            import numpy
            c_include_dirs.append(numpy.get_include())
        with io.open(pyx_file, 'w', encoding='utf-8') as f:
            f.write(code)
        extension = Extension(
            name=module_name,
            sources=[pyx_file] + c_src_files,
            include_dirs=c_include_dirs,
            library_dirs=args.library_dirs,
            extra_compile_args=args.compile_args,
            extra_link_args=args.link_args,
            libraries=args.lib,
            language='c++' if args.cplus else 'c',
        )
        try:
            opts = dict(
                quiet=quiet,
                annotate=args.annotate,
                force=True,
                # Default language level follows the running interpreter.
                language_level=min(3, sys.version_info[0]),
            )
            if args.language_level is not None:
                # Explicit -2/-3 flag overrides the default.
                assert args.language_level in (2, 3)
                opts['language_level'] = args.language_level
            return cythonize([extension], **opts)
        except CompileError:
            # The compiler already printed the error message.
            return None
|
||||
|
||||
def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
|
||||
build_extension = self._get_build_extension(
|
||||
extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
|
||||
old_threshold = None
|
||||
try:
|
||||
if not quiet:
|
||||
old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
|
||||
build_extension.run()
|
||||
finally:
|
||||
if not quiet and old_threshold is not None:
|
||||
distutils.log.set_threshold(old_threshold)
|
||||
|
||||
    def _add_pgo_flags(self, build_extension, step_name, temp_dir):
        """Append the PGO 'gen'/'use' flags for the detected compiler to all
        extensions, returning the original flag lists for later restore."""
        compiler_type = build_extension.compiler.compiler_type
        if compiler_type == 'unix':
            # Refine the generic 'unix' type by inspecting the compiler command.
            compiler_cmd = build_extension.compiler.compiler_so
            # TODO: we could try to call "[cmd] --version" for better insights
            if not compiler_cmd:
                pass
            elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
                compiler_type = 'clang'
            elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
                compiler_type = 'icc'
            elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
                compiler_type = 'gcc'
            elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
                compiler_type = 'gcc'
        config = PGO_CONFIG.get(compiler_type)
        orig_flags = []
        if config and step_name in config:
            flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
            for extension in build_extension.extensions:
                # Remember the original flags so the caller could restore them.
                orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
                extension.extra_compile_args = extension.extra_compile_args + flags
                extension.extra_link_args = extension.extra_link_args + flags
        else:
            print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
                  file=sys.stderr)
        return orig_flags
|
||||
|
||||
@property
|
||||
def so_ext(self):
|
||||
"""The extension suffix for compiled modules."""
|
||||
try:
|
||||
return self._so_ext
|
||||
except AttributeError:
|
||||
self._so_ext = self._get_build_extension().get_ext_filename('')
|
||||
return self._so_ext
|
||||
|
||||
def _clear_distutils_mkpath_cache(self):
|
||||
"""clear distutils mkpath cache
|
||||
|
||||
prevents distutils from skipping re-creation of dirs that have been removed
|
||||
"""
|
||||
try:
|
||||
from distutils.dir_util import _path_created
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
_path_created.clear()
|
||||
|
||||
    def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
                             pgo_step_name=None, _build_ext=build_ext):
        """Create a configured distutils build_ext command object.

        When pgo_step_name is given, the command class is subclassed on the
        fly so the matching PGO flags are injected right before compiling.
        """
        self._clear_distutils_mkpath_cache()
        dist = Distribution()
        config_files = dist.find_config_files()
        try:
            # A setup.cfg in the CWD belongs to the user's project, not to
            # the inlined cell - ignore it.
            config_files.remove('setup.cfg')
        except ValueError:
            pass
        dist.parse_config_files(config_files)

        if not temp_dir:
            temp_dir = lib_dir
        add_pgo_flags = self._add_pgo_flags

        if pgo_step_name:
            base_build_ext = _build_ext
            class _build_ext(_build_ext):
                def build_extensions(self):
                    # Inject the PGO gen/use flags just before compiling.
                    add_pgo_flags(self, pgo_step_name, temp_dir)
                    base_build_ext.build_extensions(self)

        build_extension = _build_ext(dist)
        build_extension.finalize_options()
        if temp_dir:
            temp_dir = encode_fs(temp_dir)
            build_extension.build_temp = temp_dir
        if lib_dir:
            lib_dir = encode_fs(lib_dir)
            build_extension.build_lib = lib_dir
        if extension is not None:
            build_extension.extensions = [extension]
        return build_extension
|
||||
|
||||
@staticmethod
|
||||
def clean_annotated_html(html):
|
||||
"""Clean up the annotated HTML source.
|
||||
|
||||
Strips the link to the generated C or C++ file, which we do not
|
||||
present to the user.
|
||||
"""
|
||||
r = re.compile('<p>Raw output: <a href="(.*)">(.*)</a>')
|
||||
html = '\n'.join(l for l in html.splitlines() if not r.match(l))
|
||||
return html
|
||||
|
||||
# Substitute the cell magics' docstrings into the module docstring's
# {CYTHON_*_DOC} placeholders so `%load_ext cython` shows full help.
__doc__ = __doc__.format(
    # rST doesn't see the -+ flag as part of an option list, so we
    # hide it from the module-level docstring.
    CYTHON_DOC=dedent(CythonMagics.cython.__doc__
                      .replace('-+, --cplus', '--cplus ')),
    CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
    CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
)
|
||||
@ -0,0 +1,119 @@
|
||||
import difflib
|
||||
import glob
|
||||
import gzip
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import Cython.Build.Dependencies
|
||||
import Cython.Utils
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
|
||||
class TestCyCache(CythonTest):
    """Tests for cythonize()'s 'cache' option (reuse of cached .c output)."""

    def setUp(self):
        CythonTest.setUp(self)
        # Isolated temp tree: sources and the cython cache live apart.
        self.temp_dir = tempfile.mkdtemp(
            prefix='cycache-test',
            dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
        self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
        self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)

    def cache_files(self, file_glob):
        # All cache entries matching the glob pattern.
        return glob.glob(os.path.join(self.cache_dir, file_glob))

    def fresh_cythonize(self, *args, **kwargs):
        # Clear in-process memoization so cythonize() cannot bypass the
        # on-disk cache being tested.
        Cython.Utils.clear_function_caches()
        Cython.Build.Dependencies._dep_tree = None  # discard method caches
        Cython.Build.Dependencies.cythonize(*args, **kwargs)

    def test_cycache_switch(self):
        # Alternating contents must create two distinct cache entries, and
        # switching back must reproduce the first output byte-for-byte.
        content1 = 'value = 1\n'
        content2 = 'value = 2\n'
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'

        with open(a_pyx, 'w') as f:
            f.write(content1)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.assertEqual(1, len(self.cache_files('a.c*')))
        with open(a_c) as f:
            a_contents1 = f.read()
        os.unlink(a_c)

        with open(a_pyx, 'w') as f:
            f.write(content2)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        with open(a_c) as f:
            a_contents2 = f.read()
        os.unlink(a_c)

        self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
        self.assertEqual(2, len(self.cache_files('a.c*')))

        with open(a_pyx, 'w') as f:
            f.write(content1)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.assertEqual(2, len(self.cache_files('a.c*')))
        with open(a_c) as f:
            a_contents = f.read()
        self.assertEqual(
            a_contents, a_contents1,
            msg='\n'.join(list(difflib.unified_diff(
                a_contents.split('\n'), a_contents1.split('\n')))[:10]))

    def test_cycache_uses_cache(self):
        # Poison the cache entry; a subsequent cache hit must reproduce the
        # poisoned bytes instead of recompiling.
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'
        with open(a_pyx, 'w') as f:
            f.write('pass')
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
        with gzip.GzipFile(a_cache, 'wb') as gzipfile:
            gzipfile.write('fake stuff'.encode('ascii'))
        os.unlink(a_c)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        with open(a_c) as f:
            a_contents = f.read()
        self.assertEqual(a_contents, 'fake stuff',
                         'Unexpected contents: %s...' % a_contents[:100])

    def test_multi_file_output(self):
        # A 'public api' module produces .c, .h and _api.h; all of them must
        # be restorable from the cache after deletion.
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'
        a_h = a_pyx[:-4] + '.h'
        a_api_h = a_pyx[:-4] + '_api.h'
        with open(a_pyx, 'w') as f:
            f.write('cdef public api int foo(int x): return x\n')
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        expected = [a_c, a_h, a_api_h]
        for output in expected:
            self.assertTrue(os.path.exists(output), output)
            os.unlink(output)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        for output in expected:
            self.assertTrue(os.path.exists(output), output)

    def test_options_invalidation(self):
        # Options that change the generated C code (cplus) must invalidate
        # the cache; purely cosmetic ones (show_version) must not.
        hash_pyx = os.path.join(self.src_dir, 'options.pyx')
        hash_c = hash_pyx[:-len('.pyx')] + '.c'

        with open(hash_pyx, 'w') as f:
            f.write('pass')
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
        self.assertEqual(1, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
        self.assertEqual(2, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
        self.assertEqual(2, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
        self.assertEqual(2, len(self.cache_files('options.c*')))
|
||||
@ -0,0 +1,482 @@
|
||||
from Cython.Build.Cythonize import (
|
||||
create_args_parser, parse_args_raw, parse_args,
|
||||
parallel_compiles
|
||||
)
|
||||
|
||||
from Cython.Compiler import Options
|
||||
from Cython.Compiler.Tests.Utils import backup_Options, restore_Options, check_global_options
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
import sys
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO # doesn't accept 'str' in Py2
|
||||
|
||||
|
||||
class TestCythonizeArgsParser(TestCase):
|
||||
|
||||
    def setUp(self):
        TestCase.setUp(self)
        # Bind a fresh parser once; each test parses a raw argv-style list.
        self.parse_args = lambda x, parser=create_args_parser() : parse_args_raw(parser, x)
|
||||
|
||||
|
||||
def are_default(self, options, skip):
    """Return True if every parsed option except those listed in *skip*
    still has its default value.

    On the first non-default option not in *skip*, the test is failed
    immediately via ``assertEqual(opt_name, "")`` so the offending option
    name shows up in the failure message; the following ``return False``
    is therefore unreachable and kept only as a safeguard.
    """
    # Options whose default is an empty container.
    empty_containers = ['directives', 'compile_time_env', 'options', 'excludes']
    # Options whose default is None.
    are_none = ['language_level', 'annotate', 'build', 'build_inplace', 'force', 'quiet', 'lenient', 'keep_going', 'no_docstrings']
    for opt_name in empty_containers:
        if len(getattr(options, opt_name))!=0 and (opt_name not in skip):
            self.assertEqual(opt_name,"", msg="For option "+opt_name)
            return False
    for opt_name in are_none:
        if (getattr(options, opt_name) is not None) and (opt_name not in skip):
            self.assertEqual(opt_name,"", msg="For option "+opt_name)
            return False
    # 'parallel' defaults to the module-level parallel_compiles value.
    if options.parallel!=parallel_compiles and ('parallel' not in skip):
        return False
    return True
|
||||
|
||||
# testing directives:
|
||||
def test_directive_short(self):
|
||||
options, args = self.parse_args(['-X', 'cdivision=True'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['directives']))
|
||||
self.assertEqual(options.directives['cdivision'], True)
|
||||
|
||||
def test_directive_long(self):
|
||||
options, args = self.parse_args(['--directive', 'cdivision=True'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['directives']))
|
||||
self.assertEqual(options.directives['cdivision'], True)
|
||||
|
||||
def test_directive_multiple(self):
|
||||
options, args = self.parse_args(['-X', 'cdivision=True', '-X', 'c_string_type=bytes'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['directives']))
|
||||
self.assertEqual(options.directives['cdivision'], True)
|
||||
self.assertEqual(options.directives['c_string_type'], 'bytes')
|
||||
|
||||
def test_directive_multiple_v2(self):
|
||||
options, args = self.parse_args(['-X', 'cdivision=True,c_string_type=bytes'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['directives']))
|
||||
self.assertEqual(options.directives['cdivision'], True)
|
||||
self.assertEqual(options.directives['c_string_type'], 'bytes')
|
||||
|
||||
def test_directive_value_yes(self):
|
||||
options, args = self.parse_args(['-X', 'cdivision=YeS'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['directives']))
|
||||
self.assertEqual(options.directives['cdivision'], True)
|
||||
|
||||
def test_directive_value_no(self):
|
||||
options, args = self.parse_args(['-X', 'cdivision=no'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['directives']))
|
||||
self.assertEqual(options.directives['cdivision'], False)
|
||||
|
||||
def test_directive_value_invalid(self):
|
||||
with self.assertRaises(ValueError) as context:
|
||||
options, args = self.parse_args(['-X', 'cdivision=sadfasd'])
|
||||
|
||||
def test_directive_key_invalid(self):
|
||||
with self.assertRaises(ValueError) as context:
|
||||
options, args = self.parse_args(['-X', 'abracadabra'])
|
||||
|
||||
def test_directive_no_value(self):
|
||||
with self.assertRaises(ValueError) as context:
|
||||
options, args = self.parse_args(['-X', 'cdivision'])
|
||||
|
||||
def test_directives_types(self):
    """Each (directive, value) pair parses via ``-X`` and round-trips into
    ``options.directives`` with the correctly typed value.

    A list of pairs is used instead of a dict: several directives are
    exercised with multiple values ('c_string_type' four times,
    'language_level' three times), and duplicate dict keys would silently
    keep only the last value, skipping the other cases.
    """
    directives = [
        ('auto_pickle', True),
        ('c_string_type', 'bytearray'),
        ('c_string_type', 'bytes'),
        ('c_string_type', 'str'),
        ('c_string_type', 'unicode'),
        ('c_string_encoding', 'ascii'),
        ('language_level', 2),
        ('language_level', 3),
        ('language_level', '3str'),
        ('set_initial_path', 'my_initial_path'),
    ]
    for key, value in directives:
        cmd = '{key}={value}'.format(key=key, value=str(value))
        options, args = self.parse_args(['-X', cmd])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']), msg="Error for option: " + cmd)
        self.assertEqual(options.directives[key], value, msg="Error for option: " + cmd)
|
||||
|
||||
def test_directives_wrong(self):
    """Invalid directive values must raise ValueError during parsing.

    A list of pairs is used instead of a dict because 'auto_pickle'
    appears twice; duplicate dict keys would silently drop the first
    case and never test it.
    """
    directives = [
        ('auto_pickle', 42),        # bool-typed directive given an int
        ('auto_pickle', 'NONONO'),  # bool-typed directive given a non-bool string
        ('c_string_type', 'bites'),
        #('c_string_encoding', 'a'),
        #('language_level', 4),
    ]
    for key, value in directives:
        cmd = '{key}={value}'.format(key=key, value=str(value))
        with self.assertRaises(ValueError, msg="Error for option: " + cmd):
            options, args = self.parse_args(['-X', cmd])
|
||||
|
||||
def test_compile_time_env_short(self):
|
||||
options, args = self.parse_args(['-E', 'MYSIZE=10'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
||||
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
||||
|
||||
def test_compile_time_env_long(self):
|
||||
options, args = self.parse_args(['--compile-time-env', 'MYSIZE=10'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
||||
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
||||
|
||||
def test_compile_time_env_multiple(self):
|
||||
options, args = self.parse_args(['-E', 'MYSIZE=10', '-E', 'ARRSIZE=11'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
||||
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
||||
self.assertEqual(options.compile_time_env['ARRSIZE'], 11)
|
||||
|
||||
def test_compile_time_env_multiple_v2(self):
|
||||
options, args = self.parse_args(['-E', 'MYSIZE=10,ARRSIZE=11'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
||||
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
||||
self.assertEqual(options.compile_time_env['ARRSIZE'], 11)
|
||||
|
||||
#testing options
|
||||
def test_option_short(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=True'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_long(self):
|
||||
options, args = self.parse_args(['--option', 'docstrings=True'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_multiple(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=True', '-s', 'buffer_max_dims=8'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
self.assertEqual(options.options['buffer_max_dims'], True) # really?
|
||||
|
||||
def test_option_multiple_v2(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=True,buffer_max_dims=8'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
self.assertEqual(options.options['buffer_max_dims'], True) # really?
|
||||
|
||||
def test_option_value_yes(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=YeS'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_value_4242(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=4242'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_value_0(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=0'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], False)
|
||||
|
||||
def test_option_value_emptystr(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings='])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_value_a_str(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=BB'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_value_no(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings=nO'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], False)
|
||||
|
||||
def test_option_no_value(self):
|
||||
options, args = self.parse_args(['-s', 'docstrings'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['docstrings'], True)
|
||||
|
||||
def test_option_any_key(self):
|
||||
options, args = self.parse_args(['-s', 'abracadabra'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['options']))
|
||||
self.assertEqual(options.options['abracadabra'], True)
|
||||
|
||||
def test_language_level_2(self):
|
||||
options, args = self.parse_args(['-2'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['language_level']))
|
||||
self.assertEqual(options.language_level, 2)
|
||||
|
||||
def test_language_level_3(self):
|
||||
options, args = self.parse_args(['-3'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['language_level']))
|
||||
self.assertEqual(options.language_level, 3)
|
||||
|
||||
def test_language_level_3str(self):
|
||||
options, args = self.parse_args(['--3str'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['language_level']))
|
||||
self.assertEqual(options.language_level, '3str')
|
||||
|
||||
def test_annotate_short(self):
|
||||
options, args = self.parse_args(['-a'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['annotate']))
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
|
||||
def test_annotate_long(self):
|
||||
options, args = self.parse_args(['--annotate'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['annotate']))
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
|
||||
def test_annotate_fullc(self):
|
||||
options, args = self.parse_args(['--annotate-fullc'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['annotate']))
|
||||
self.assertEqual(options.annotate, 'fullc')
|
||||
|
||||
def test_annotate_and_positional(self):
|
||||
options, args = self.parse_args(['-a', 'foo.pyx'])
|
||||
self.assertEqual(args, ['foo.pyx'])
|
||||
self.assertTrue(self.are_default(options, ['annotate']))
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
|
||||
def test_annotate_and_optional(self):
|
||||
options, args = self.parse_args(['-a', '--3str'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['annotate', 'language_level']))
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
self.assertEqual(options.language_level, '3str')
|
||||
|
||||
def test_exclude_short(self):
|
||||
options, args = self.parse_args(['-x', '*.pyx'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['excludes']))
|
||||
self.assertTrue('*.pyx' in options.excludes)
|
||||
|
||||
def test_exclude_long(self):
|
||||
options, args = self.parse_args(['--exclude', '*.pyx'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['excludes']))
|
||||
self.assertTrue('*.pyx' in options.excludes)
|
||||
|
||||
def test_exclude_multiple(self):
|
||||
options, args = self.parse_args(['--exclude', '*.pyx', '--exclude', '*.py', ])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['excludes']))
|
||||
self.assertEqual(options.excludes, ['*.pyx', '*.py'])
|
||||
|
||||
def test_build_short(self):
|
||||
options, args = self.parse_args(['-b'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['build']))
|
||||
self.assertEqual(options.build, True)
|
||||
|
||||
def test_build_long(self):
|
||||
options, args = self.parse_args(['--build'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['build']))
|
||||
self.assertEqual(options.build, True)
|
||||
|
||||
def test_inplace_short(self):
|
||||
options, args = self.parse_args(['-i'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['build_inplace']))
|
||||
self.assertEqual(options.build_inplace, True)
|
||||
|
||||
def test_inplace_long(self):
|
||||
options, args = self.parse_args(['--inplace'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['build_inplace']))
|
||||
self.assertEqual(options.build_inplace, True)
|
||||
|
||||
def test_parallel_short(self):
|
||||
options, args = self.parse_args(['-j', '42'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['parallel']))
|
||||
self.assertEqual(options.parallel, 42)
|
||||
|
||||
def test_parallel_long(self):
|
||||
options, args = self.parse_args(['--parallel', '42'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['parallel']))
|
||||
self.assertEqual(options.parallel, 42)
|
||||
|
||||
def test_force_short(self):
|
||||
options, args = self.parse_args(['-f'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['force']))
|
||||
self.assertEqual(options.force, True)
|
||||
|
||||
def test_force_long(self):
|
||||
options, args = self.parse_args(['--force'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['force']))
|
||||
self.assertEqual(options.force, True)
|
||||
|
||||
def test_quite_short(self):
|
||||
options, args = self.parse_args(['-q'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['quiet']))
|
||||
self.assertEqual(options.quiet, True)
|
||||
|
||||
def test_quite_long(self):
|
||||
options, args = self.parse_args(['--quiet'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['quiet']))
|
||||
self.assertEqual(options.quiet, True)
|
||||
|
||||
def test_lenient_long(self):
|
||||
options, args = self.parse_args(['--lenient'])
|
||||
self.assertTrue(self.are_default(options, ['lenient']))
|
||||
self.assertFalse(args)
|
||||
self.assertEqual(options.lenient, True)
|
||||
|
||||
def test_keep_going_short(self):
|
||||
options, args = self.parse_args(['-k'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['keep_going']))
|
||||
self.assertEqual(options.keep_going, True)
|
||||
|
||||
def test_keep_going_long(self):
|
||||
options, args = self.parse_args(['--keep-going'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['keep_going']))
|
||||
self.assertEqual(options.keep_going, True)
|
||||
|
||||
def test_no_docstrings_long(self):
|
||||
options, args = self.parse_args(['--no-docstrings'])
|
||||
self.assertFalse(args)
|
||||
self.assertTrue(self.are_default(options, ['no_docstrings']))
|
||||
self.assertEqual(options.no_docstrings, True)
|
||||
|
||||
def test_file_name(self):
|
||||
options, args = self.parse_args(['file1.pyx', 'file2.pyx'])
|
||||
self.assertEqual(len(args), 2)
|
||||
self.assertEqual(args[0], 'file1.pyx')
|
||||
self.assertEqual(args[1], 'file2.pyx')
|
||||
self.assertTrue(self.are_default(options, []))
|
||||
|
||||
def test_option_first(self):
|
||||
options, args = self.parse_args(['-i', 'file.pyx'])
|
||||
self.assertEqual(args, ['file.pyx'])
|
||||
self.assertEqual(options.build_inplace, True)
|
||||
self.assertTrue(self.are_default(options, ['build_inplace']))
|
||||
|
||||
def test_file_inbetween(self):
|
||||
options, args = self.parse_args(['-i', 'file.pyx', '-a'])
|
||||
self.assertEqual(args, ['file.pyx'])
|
||||
self.assertEqual(options.build_inplace, True)
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
self.assertTrue(self.are_default(options, ['build_inplace', 'annotate']))
|
||||
|
||||
def test_option_trailing(self):
|
||||
options, args = self.parse_args(['file.pyx', '-i'])
|
||||
self.assertEqual(args, ['file.pyx'])
|
||||
self.assertEqual(options.build_inplace, True)
|
||||
self.assertTrue(self.are_default(options, ['build_inplace']))
|
||||
|
||||
def test_interspersed_positional(self):
|
||||
options, sources = self.parse_args([
|
||||
'file1.pyx', '-a',
|
||||
'file2.pyx'
|
||||
])
|
||||
self.assertEqual(sources, ['file1.pyx', 'file2.pyx'])
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
self.assertTrue(self.are_default(options, ['annotate']))
|
||||
|
||||
def test_interspersed_positional2(self):
|
||||
options, sources = self.parse_args([
|
||||
'file1.pyx', '-a',
|
||||
'file2.pyx', '-a', 'file3.pyx'
|
||||
])
|
||||
self.assertEqual(sources, ['file1.pyx', 'file2.pyx', 'file3.pyx'])
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
self.assertTrue(self.are_default(options, ['annotate']))
|
||||
|
||||
def test_interspersed_positional3(self):
|
||||
options, sources = self.parse_args([
|
||||
'-f', 'f1', 'f2', '-a',
|
||||
'f3', 'f4', '-a', 'f5'
|
||||
])
|
||||
self.assertEqual(sources, ['f1', 'f2', 'f3', 'f4', 'f5'])
|
||||
self.assertEqual(options.annotate, 'default')
|
||||
self.assertEqual(options.force, True)
|
||||
self.assertTrue(self.are_default(options, ['annotate', 'force']))
|
||||
|
||||
def test_wrong_option(self):
|
||||
old_stderr = sys.stderr
|
||||
stderr = sys.stderr = StringIO()
|
||||
try:
|
||||
self.assertRaises(SystemExit, self.parse_args,
|
||||
['--unknown-option']
|
||||
)
|
||||
finally:
|
||||
sys.stderr = old_stderr
|
||||
self.assertTrue(stderr.getvalue())
|
||||
|
||||
|
||||
class TestParseArgs(TestCase):
|
||||
def setUp(self):
|
||||
self._options_backup = backup_Options()
|
||||
|
||||
def tearDown(self):
|
||||
restore_Options(self._options_backup)
|
||||
|
||||
def check_default_global_options(self, white_list=()):
    """Assert that no global Options were changed, apart from those
    named in *white_list*.

    The default is an immutable tuple rather than the original ``[]``
    to avoid the shared-mutable-default-argument pitfall; callers that
    pass a list are unaffected.
    """
    self.assertEqual(check_global_options(self._options_backup, list(white_list)), "")
|
||||
|
||||
def test_build_set_for_inplace(self):
|
||||
options, args = parse_args(['foo.pyx', '-i'])
|
||||
self.assertEqual(options.build, True)
|
||||
self.check_default_global_options()
|
||||
|
||||
def test_lenient(self):
|
||||
options, sources = parse_args(['foo.pyx', '--lenient'])
|
||||
self.assertEqual(sources, ['foo.pyx'])
|
||||
self.assertEqual(Options.error_on_unknown_names, False)
|
||||
self.assertEqual(Options.error_on_uninitialized, False)
|
||||
self.check_default_global_options(['error_on_unknown_names', 'error_on_uninitialized'])
|
||||
|
||||
def test_annotate(self):
|
||||
options, sources = parse_args(['foo.pyx', '--annotate'])
|
||||
self.assertEqual(sources, ['foo.pyx'])
|
||||
self.assertEqual(Options.annotate, 'default')
|
||||
self.check_default_global_options(['annotate'])
|
||||
|
||||
def test_annotate_fullc(self):
|
||||
options, sources = parse_args(['foo.pyx', '--annotate-fullc'])
|
||||
self.assertEqual(sources, ['foo.pyx'])
|
||||
self.assertEqual(Options.annotate, 'fullc')
|
||||
self.check_default_global_options(['annotate'])
|
||||
|
||||
def test_no_docstrings(self):
|
||||
options, sources = parse_args(['foo.pyx', '--no-docstrings'])
|
||||
self.assertEqual(sources, ['foo.pyx'])
|
||||
self.assertEqual(Options.docstrings, False)
|
||||
self.check_default_global_options(['docstrings'])
|
||||
@ -0,0 +1,142 @@
|
||||
import contextlib
|
||||
import os.path
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from io import open
|
||||
from os.path import join as pjoin
|
||||
|
||||
from ..Dependencies import extended_iglob
|
||||
|
||||
|
||||
@contextlib.contextmanager
def writable_file(dir_path, filename):
    """Yield a UTF-8 text file opened for writing at dir_path/filename.

    The file handle is closed when the with-block exits, whether it
    exits normally or via an exception.
    """
    target = pjoin(dir_path, filename)
    handle = open(target, "w", encoding="utf8")
    try:
        yield handle
    finally:
        handle.close()
|
||||
|
||||
|
||||
class TestGlobbing(unittest.TestCase):
|
||||
@classmethod
def setUpClass(cls):
    """Create a temporary directory tree for glob tests and chdir into it.

    Layout: for each first-level directory in a, ax, b, bx, c, cx, d, dx,
    the subdirectories x, y, z each contain file2_pyx.pyx and file2_py.py,
    and the first-level directory itself contains file1_pyx.pyx and
    file1_py.py.
    """
    cls._orig_dir = os.getcwd()
    if sys.version_info[0] < 3:
        # Py2 has no TemporaryDirectory: keep the raw path and delete it
        # manually in tearDownClass.
        temp_path = cls._tmpdir = tempfile.mkdtemp()
    else:
        cls._tmpdir = tempfile.TemporaryDirectory()
        temp_path = cls._tmpdir.name
    os.chdir(temp_path)

    for dir1 in "abcd":
        for dir1x in [dir1, dir1 + 'x']:
            for dir2 in "xyz":
                dir_path = pjoin(dir1x, dir2)
                os.makedirs(dir_path)
                with writable_file(dir_path, "file2_pyx.pyx") as f:
                    f.write(u'""" PYX """')
                with writable_file(dir_path, "file2_py.py") as f:
                    f.write(u'""" PY """')

            with writable_file(dir1x, "file1_pyx.pyx") as f:
                f.write(u'""" PYX """')
            with writable_file(dir1x, "file1_py.py") as f:
                f.write(u'""" PY """')
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
os.chdir(cls._orig_dir)
|
||||
if sys.version_info[0] < 3:
|
||||
import shutil
|
||||
shutil.rmtree(cls._tmpdir)
|
||||
else:
|
||||
cls._tmpdir.cleanup()
|
||||
|
||||
def files_equal(self, pattern, expected_files):
|
||||
expected_files = sorted(expected_files)
|
||||
# It's the users's choice whether '/' will appear on Windows.
|
||||
matched_files = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern))
|
||||
self.assertListEqual(matched_files, expected_files) # /
|
||||
|
||||
# Special case for Windows: also support '\' in patterns.
|
||||
if os.sep == '\\' and '/' in pattern:
|
||||
matched_files = sorted(extended_iglob(pattern.replace('/', '\\')))
|
||||
self.assertListEqual(matched_files, expected_files) # \
|
||||
|
||||
def test_extended_iglob_simple(self):
|
||||
ax_files = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")]
|
||||
self.files_equal("a/x/*", ax_files)
|
||||
self.files_equal("a/x/*.c12", [])
|
||||
self.files_equal("a/x/*.{py,pyx,c12}", ax_files)
|
||||
self.files_equal("a/x/*.{py,pyx}", ax_files)
|
||||
self.files_equal("a/x/*.{pyx}", ax_files[:1])
|
||||
self.files_equal("a/x/*.pyx", ax_files[:1])
|
||||
self.files_equal("a/x/*.{py}", ax_files[1:])
|
||||
self.files_equal("a/x/*.py", ax_files[1:])
|
||||
|
||||
def test_extended_iglob_simple_star(self):
|
||||
for basedir in "ad":
|
||||
files = [
|
||||
pjoin(basedir, dirname, filename)
|
||||
for dirname in "xyz"
|
||||
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
||||
]
|
||||
self.files_equal(basedir + "/*/*", files)
|
||||
self.files_equal(basedir + "/*/*.c12", [])
|
||||
self.files_equal(basedir + "/*/*.{py,pyx,c12}", files)
|
||||
self.files_equal(basedir + "/*/*.{py,pyx}", files)
|
||||
self.files_equal(basedir + "/*/*.{pyx}", files[::2])
|
||||
self.files_equal(basedir + "/*/*.pyx", files[::2])
|
||||
self.files_equal(basedir + "/*/*.{py}", files[1::2])
|
||||
self.files_equal(basedir + "/*/*.py", files[1::2])
|
||||
|
||||
for subdir in "xy*":
|
||||
files = [
|
||||
pjoin(basedir, dirname, filename)
|
||||
for dirname in "xyz"
|
||||
if subdir in ('*', dirname)
|
||||
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
||||
]
|
||||
path = basedir + '/' + subdir + '/'
|
||||
self.files_equal(path + "*", files)
|
||||
self.files_equal(path + "*.{py,pyx}", files)
|
||||
self.files_equal(path + "*.{pyx}", files[::2])
|
||||
self.files_equal(path + "*.pyx", files[::2])
|
||||
self.files_equal(path + "*.{py}", files[1::2])
|
||||
self.files_equal(path + "*.py", files[1::2])
|
||||
|
||||
def test_extended_iglob_double_star(self):
|
||||
basedirs = os.listdir(".")
|
||||
files = [
|
||||
pjoin(basedir, dirname, filename)
|
||||
for basedir in basedirs
|
||||
for dirname in "xyz"
|
||||
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
||||
]
|
||||
all_files = [
|
||||
pjoin(basedir, filename)
|
||||
for basedir in basedirs
|
||||
for filename in ["file1_pyx.pyx", "file1_py.py"]
|
||||
] + files
|
||||
self.files_equal("*/*/*", files)
|
||||
self.files_equal("*/*/**/*", files)
|
||||
self.files_equal("*/**/*.*", all_files)
|
||||
self.files_equal("**/*.*", all_files)
|
||||
self.files_equal("*/**/*.c12", [])
|
||||
self.files_equal("**/*.c12", [])
|
||||
self.files_equal("*/*/*.{py,pyx,c12}", files)
|
||||
self.files_equal("*/*/**/*.{py,pyx,c12}", files)
|
||||
self.files_equal("*/**/*/*.{py,pyx,c12}", files)
|
||||
self.files_equal("**/*/*/*.{py,pyx,c12}", files)
|
||||
self.files_equal("**/*.{py,pyx,c12}", all_files)
|
||||
self.files_equal("*/*/*.{py,pyx}", files)
|
||||
self.files_equal("**/*/*/*.{py,pyx}", files)
|
||||
self.files_equal("*/**/*/*.{py,pyx}", files)
|
||||
self.files_equal("**/*.{py,pyx}", all_files)
|
||||
self.files_equal("*/*/*.{pyx}", files[::2])
|
||||
self.files_equal("**/*.{pyx}", all_files[::2])
|
||||
self.files_equal("*/**/*/*.pyx", files[::2])
|
||||
self.files_equal("*/*/*.pyx", files[::2])
|
||||
self.files_equal("**/*.pyx", all_files[::2])
|
||||
self.files_equal("*/*/*.{py}", files[1::2])
|
||||
self.files_equal("**/*.{py}", all_files[1::2])
|
||||
self.files_equal("*/*/*.py", files[1::2])
|
||||
self.files_equal("**/*.py", all_files[1::2])
|
||||
@ -0,0 +1,112 @@
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
from Cython.Shadow import inline
|
||||
from Cython.Build.Inline import safe_type
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
try:
|
||||
import numpy
|
||||
has_numpy = True
|
||||
except:
|
||||
has_numpy = False
|
||||
|
||||
test_kwds = dict(force=True, quiet=True)
|
||||
|
||||
global_value = 100
|
||||
|
||||
class TestInline(CythonTest):
|
||||
def setUp(self):
    """Prepare the keyword arguments passed to every cython.inline() call.

    Build artefacts go into TEST_TMP/inline when a TEST_TMP directory
    exists (test-runner environment), otherwise into a fresh temporary
    directory.
    """
    CythonTest.setUp(self)
    self._call_kwds = dict(test_kwds)
    if os.path.isdir('TEST_TMP'):
        lib_dir = os.path.join('TEST_TMP','inline')
    else:
        # NOTE(review): mkdtemp directories are never cleaned up here --
        # presumably acceptable for test runs; confirm.
        lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
    self._call_kwds['lib_dir'] = lib_dir
|
||||
|
||||
def test_simple(self):
|
||||
self.assertEqual(inline("return 1+2", **self._call_kwds), 3)
|
||||
|
||||
def test_types(self):
|
||||
self.assertEqual(inline("""
|
||||
cimport cython
|
||||
return cython.typeof(a), cython.typeof(b)
|
||||
""", a=1.0, b=[], **self._call_kwds), ('double', 'list object'))
|
||||
|
||||
def test_locals(self):
|
||||
a = 1
|
||||
b = 2
|
||||
self.assertEqual(inline("return a+b", **self._call_kwds), 3)
|
||||
|
||||
def test_globals(self):
|
||||
self.assertEqual(inline("return global_value + 1", **self._call_kwds), global_value + 1)
|
||||
|
||||
def test_no_return(self):
|
||||
self.assertEqual(inline("""
|
||||
a = 1
|
||||
cdef double b = 2
|
||||
cdef c = []
|
||||
""", **self._call_kwds), dict(a=1, b=2.0, c=[]))
|
||||
|
||||
def test_def_node(self):
|
||||
foo = inline("def foo(x): return x * x", **self._call_kwds)['foo']
|
||||
self.assertEqual(foo(7), 49)
|
||||
|
||||
def test_class_ref(self):
|
||||
class Type(object):
|
||||
pass
|
||||
tp = inline("Type")['Type']
|
||||
self.assertEqual(tp, Type)
|
||||
|
||||
def test_pure(self):
|
||||
import cython as cy
|
||||
b = inline("""
|
||||
b = cy.declare(float, a)
|
||||
c = cy.declare(cy.pointer(cy.float), &b)
|
||||
return b
|
||||
""", a=3, **self._call_kwds)
|
||||
self.assertEqual(type(b), float)
|
||||
|
||||
def test_compiler_directives(self):
|
||||
self.assertEqual(
|
||||
inline('return sum(x)',
|
||||
x=[1, 2, 3],
|
||||
cython_compiler_directives={'boundscheck': False}),
|
||||
6
|
||||
)
|
||||
|
||||
def test_lang_version(self):
|
||||
# GH-3419. Caching for inline code didn't always respect compiler directives.
|
||||
inline_divcode = "def f(int a, int b): return a/b"
|
||||
self.assertEqual(
|
||||
inline(inline_divcode, language_level=2)['f'](5,2),
|
||||
2
|
||||
)
|
||||
self.assertEqual(
|
||||
inline(inline_divcode, language_level=3)['f'](5,2),
|
||||
2.5
|
||||
)
|
||||
self.assertEqual(
|
||||
inline(inline_divcode, language_level=2)['f'](5,2),
|
||||
2
|
||||
)
|
||||
|
||||
def test_repeated_use(self):
|
||||
inline_mulcode = "def f(int a, int b): return a * b"
|
||||
self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
|
||||
self.assertEqual(inline(inline_mulcode)['f'](5, 3), 15)
|
||||
self.assertEqual(inline(inline_mulcode)['f'](6, 2), 12)
|
||||
self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
|
||||
|
||||
f = inline(inline_mulcode)['f']
|
||||
self.assertEqual(f(5, 2), 10)
|
||||
self.assertEqual(f(5, 3), 15)
|
||||
|
||||
@unittest.skipIf(not has_numpy, "NumPy is not available")
|
||||
def test_numpy(self):
|
||||
import numpy
|
||||
a = numpy.ndarray((10, 20))
|
||||
a[0,0] = 10
|
||||
self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
|
||||
self.assertEqual(inline("return a[0,0]", a=a, **self._call_kwds), 10.0)
|
||||
@ -0,0 +1,295 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# tag: ipython
|
||||
|
||||
"""Tests for the Cython magics extension."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import io
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from unittest import skipIf
|
||||
|
||||
from Cython.Build import IpythonMagic
|
||||
from Cython.TestUtils import CythonTest
|
||||
from Cython.Compiler.Annotate import AnnotationCCodeWriter
|
||||
|
||||
try:
|
||||
import IPython.testing.globalipapp
|
||||
except ImportError:
|
||||
# Disable tests and fake helpers for initialisation below.
|
||||
def skip_if_not_installed(_):
|
||||
return None
|
||||
else:
|
||||
def skip_if_not_installed(c):
|
||||
return c
|
||||
|
||||
# not using IPython's decorators here because they depend on "nose"
|
||||
skip_win32 = skipIf(sys.platform == 'win32', "Skip on Windows")
|
||||
skip_py27 = skipIf(sys.version_info[:2] == (2,7), "Disabled in Py2.7")
|
||||
|
||||
try:
|
||||
# disable IPython history thread before it gets started to avoid having to clean it up
|
||||
from IPython.core.history import HistoryManager
|
||||
HistoryManager.enabled = False
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
@contextmanager
def capture_output():
    """Temporarily replace sys.stdout/sys.stderr with in-memory buffers.

    Yields a list that, after the with-block exits, contains two strings:
    the captured stdout text and the captured stderr text, in that order.

    Fix: the replacement streams are now constructed *before* the try
    block.  In the original, a failure while building them (e.g. while
    reading ``sys.stdout.encoding``) reached the ``finally`` clause with
    ``replacement`` unbound, raising a NameError that masked the real
    error.
    """
    backup = sys.stdout, sys.stderr
    replacement = [
        io.TextIOWrapper(io.BytesIO(), encoding=sys.stdout.encoding),
        io.TextIOWrapper(io.BytesIO(), encoding=sys.stderr.encoding),
    ]
    output = []
    try:
        sys.stdout, sys.stderr = replacement
        yield output
    finally:
        # Always restore the real streams, then drain the buffers into
        # the yielded list so callers can inspect what was printed.
        sys.stdout, sys.stderr = backup
        for wrapper in replacement:
            wrapper.seek(0)  # rewind
            output.append(wrapper.read())
            wrapper.close()
|
||||
|
||||
|
||||
code = u"""\
|
||||
def f(x):
|
||||
return 2*x
|
||||
"""
|
||||
|
||||
cython3_code = u"""\
|
||||
def f(int x):
|
||||
return 2 / x
|
||||
|
||||
def call(x):
|
||||
return f(*(x,))
|
||||
"""
|
||||
|
||||
pgo_cython3_code = cython3_code + u"""\
|
||||
def main():
|
||||
for _ in range(100): call(5)
|
||||
main()
|
||||
"""
|
||||
|
||||
compile_error_code = u'''\
|
||||
cdef extern from *:
|
||||
"""
|
||||
xxx a=1;
|
||||
"""
|
||||
int a;
|
||||
def doit():
|
||||
return a
|
||||
'''
|
||||
|
||||
compile_warning_code = u'''\
|
||||
cdef extern from *:
|
||||
"""
|
||||
#pragma message ( "CWarning" )
|
||||
int a = 42;
|
||||
"""
|
||||
int a;
|
||||
def doit():
|
||||
return a
|
||||
'''
|
||||
|
||||
|
||||
@skip_if_not_installed
class TestIPythonMagic(CythonTest):
    """Tests for the ``%%cython`` family of IPython cell magics.

    All tests share one global IPython shell (``cls._ip``); cells are
    executed via ``run_cell_magic`` and results are read back from the
    shell's user namespace.
    """

    @classmethod
    def setUpClass(cls):
        CythonTest.setUpClass()
        # One global in-process IPython application for the whole class.
        cls._ip = IPython.testing.globalipapp.get_ipython()

    def setUp(self):
        CythonTest.setUp(self)
        # Re-loading the extension each test is idempotent.
        self._ip.extension_manager.load_extension('cython')

    def test_cython_inline(self):
        # %%cython_inline sees the interactive namespace (a, b).
        ip = self._ip
        ip.ex('a=10; b=20')
        result = ip.run_cell_magic('cython_inline', '', 'return a+b')
        self.assertEqual(result, 30)

    @skip_win32
    def test_cython_pyximport(self):
        # %%cython_pyximport writes the cell to <module>.pyx and imports it.
        ip = self._ip
        module_name = '_test_cython_pyximport'
        ip.run_cell_magic('cython_pyximport', module_name, code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)
        # Running the magic again with the same name must reload cleanly.
        ip.run_cell_magic('cython_pyximport', module_name, code)
        ip.ex('h = f(-10)')
        self.assertEqual(ip.user_ns['h'], -20.0)
        try:
            # Best-effort cleanup of the generated source file.
            os.remove(module_name + '.pyx')
        except OSError:
            pass

    def test_cython(self):
        ip = self._ip
        ip.run_cell_magic('cython', '', code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_name(self):
        # The Cython module named 'mymodule' defines the function f.
        ip = self._ip
        ip.run_cell_magic('cython', '--name=mymodule', code)
        # This module can now be imported in the interactive namespace.
        ip.ex('import mymodule; g = mymodule.f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_language_level(self):
        # The Cython cell defines the functions f() and call().
        # With no explicit flag the language level follows the running
        # Python's major version, so 2/x floor-divides on Py2 only.
        ip = self._ip
        ip.run_cell_magic('cython', '', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        if sys.version_info[0] < 3:
            self.assertEqual(ip.user_ns['g'], 2 // 10)
            self.assertEqual(ip.user_ns['h'], 2 // 10)
        else:
            self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
            self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    def test_cython3(self):
        # The Cython cell defines the functions f() and call().
        # "-3" forces Py3 semantics: true division.
        ip = self._ip
        ip.run_cell_magic('cython', '-3', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    def test_cython2(self):
        # The Cython cell defines the functions f() and call().
        # "-2" forces Py2 semantics: floor division for int operands.
        ip = self._ip
        ip.run_cell_magic('cython', '-2', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        self.assertEqual(ip.user_ns['g'], 2 // 10)
        self.assertEqual(ip.user_ns['h'], 2 // 10)

    def test_cython_compile_error_shown(self):
        ip = self._ip
        with capture_output() as out:
            ip.run_cell_magic('cython', '-3', compile_error_code)
        captured_out, captured_err = out

        # it could be that c-level output is captured by distutil-extension
        # (and not by us) and is printed to stdout:
        captured_all = captured_out + "\n" + captured_err
        self.assertTrue("error" in captured_all, msg="error in " + captured_all)

    def test_cython_link_error_shown(self):
        # Linking against a nonexistent library must surface an error.
        ip = self._ip
        with capture_output() as out:
            ip.run_cell_magic('cython', '-3 -l=xxxxxxxx', code)
        captured_out, captured_err = out

        # it could be that c-level output is captured by distutil-extension
        # (and not by us) and is printed to stdout:
        # NOTE(review): the "\n!" separator differs from the plain "\n" used
        # in test_cython_compile_error_shown — possibly unintended, though it
        # is harmless for the substring check below. Confirm before changing.
        captured_all = captured_out + "\n!" + captured_err
        self.assertTrue("error" in captured_all, msg="error in " + captured_all)

    def test_cython_warning_shown(self):
        ip = self._ip
        with capture_output() as out:
            # force rebuild, otherwise no warning as after the first success
            # no build step is performed
            ip.run_cell_magic('cython', '-3 -f', compile_warning_code)
        captured_out, captured_err = out

        # check that warning was printed to stdout even if build hasn't failed
        self.assertTrue("CWarning" in captured_out)

    @skip_py27  # Not strictly broken in Py2.7 but currently fails in CI due to C compiler issues.
    @skip_win32
    def test_cython3_pgo(self):
        # The Cython cell defines the functions f() and call().
        # "--pgo" builds twice: instrumented profile run, then optimized.
        ip = self._ip
        ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
        ip.ex('g = f(10); h = call(10); main()')
        self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    @skip_win32
    def test_extlibs(self):
        # "-l m" links against libm; sin(0.0) overwrites the sentinel x=1.
        ip = self._ip
        # NOTE: this local deliberately shadows the module-level `code`.
        code = u"""
from libc.math cimport sin
x = sin(0.0)
        """
        ip.user_ns['x'] = 1
        ip.run_cell_magic('cython', '-l m', code)
        self.assertEqual(ip.user_ns['x'], 0)


    def test_cython_verbose(self):
        ip = self._ip
        ip.run_cell_magic('cython', '--verbose', code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_verbose_thresholds(self):
        @contextmanager
        def mock_distutils():
            # Stand-in for distutils.log that records every threshold set.
            class MockLog:
                DEBUG = 1
                INFO = 2
                thresholds = [INFO]

                def set_threshold(self, val):
                    self.thresholds.append(val)
                    # Return the previous threshold, like the real API.
                    return self.thresholds[-2]


            new_log = MockLog()
            old_log = IpythonMagic.distutils.log
            try:
                IpythonMagic.distutils.log = new_log
                yield new_log
            finally:
                IpythonMagic.distutils.log = old_log

        ip = self._ip
        # --verbose should lower the threshold to DEBUG and restore it after.
        with mock_distutils() as verbose_log:
            ip.run_cell_magic('cython', '--verbose', code)
            ip.ex('g = f(10)')
            self.assertEqual(ip.user_ns['g'], 20.0)
            self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
                             verbose_log.thresholds)

        # Without --verbose the threshold must be left untouched.
        with mock_distutils() as normal_log:
            ip.run_cell_magic('cython', '', code)
            ip.ex('g = f(10)')
            self.assertEqual(ip.user_ns['g'], 20.0)
            self.assertEqual([normal_log.INFO], normal_log.thresholds)

    def test_cython_no_annotate(self):
        # Without --annotate the magic returns no HTML object.
        ip = self._ip
        html = ip.run_cell_magic('cython', '', code)
        self.assertTrue(html is None)

    def test_cython_annotate(self):
        ip = self._ip
        html = ip.run_cell_magic('cython', '--annotate', code)
        # somewhat brittle way to differentiate between annotated htmls
        # with/without complete source code:
        self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE not in html.data)

    def test_cython_annotate_default(self):
        # "-a" is the short form of --annotate.
        ip = self._ip
        html = ip.run_cell_magic('cython', '-a', code)
        # somewhat brittle way to differentiate between annotated htmls
        # with/without complete source code:
        self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE not in html.data)

    def test_cython_annotate_complete_c_code(self):
        ip = self._ip
        html = ip.run_cell_magic('cython', '--annotate-fullc', code)
        # somewhat brittle way to differentiate between annotated htmls
        # with/without complete source code:
        self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE in html.data)
|
||||
@ -0,0 +1,212 @@
|
||||
import shutil
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
import Cython.Build.Dependencies
|
||||
import Cython.Utils
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
|
||||
def fresh_cythonize(*args, **kwargs):
    """Run ``cythonize()`` with all Cython-internal caches cleared first.

    Ensures each call re-scans dependencies instead of reusing results
    cached by a previous call in the same process.
    """
    deps_module = Cython.Build.Dependencies
    Cython.Utils.clear_function_caches()
    # Dropping the cached dependency tree forces a full re-scan.
    deps_module._dep_tree = None
    deps_module.cythonize(*args, **kwargs)
|
||||
|
||||
class TestRecythonize(CythonTest):
    """End-to-end checks that cythonize() regenerates the .c output when a
    .pxd that a module (or a cimported module) depends on changes.

    Each test writes tiny sources into a fresh temp dir, cythonizes, edits
    the .pxd (int -> double), cythonizes again, and asserts the generated C
    reflects the new type.
    """

    def setUp(self):
        CythonTest.setUp(self)
        # Prefer the repository's TEST_TMP directory when present.
        self.temp_dir = (
            tempfile.mkdtemp(
                prefix='recythonize-test',
                dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None
            )
        )

    def tearDown(self):
        CythonTest.tearDown(self)
        shutil.rmtree(self.temp_dir)

    def test_recythonize_pyx_on_pxd_change(self):

        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_pyx = os.path.join(src_dir, 'a.pyx')
        a_c = os.path.join(src_dir, 'a.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        with open(a_pxd, 'w') as f:
            f.write('cdef int value\n')

        with open(a_pyx, 'w') as f:
            f.write('value = 1\n')


        # The dependencies for "a.pyx" are "a.pxd" and "a.pyx".
        self.assertEqual({a_pxd, a_pyx}, dep_tree.all_dependencies(a_pyx))

        # Cythonize to create a.c
        fresh_cythonize(a_pyx)

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        with open(a_c) as f:
            a_c_contents1 = f.read()

        # Change the declared type; this alone must trigger regeneration.
        with open(a_pxd, 'w') as f:
            f.write('cdef double value\n')

        fresh_cythonize(a_pyx)

        with open(a_c) as f:
            a_c_contents2 = f.read()

        # int assignment before, double assignment after.
        self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
        self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
        self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
        self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)


    def test_recythonize_py_on_pxd_change(self):
        # Same scenario as above, but the implementation file is pure-Python
        # (a.py) augmented by a.pxd.

        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_py = os.path.join(src_dir, 'a.py')
        a_c = os.path.join(src_dir, 'a.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        with open(a_pxd, 'w') as f:
            f.write('cdef int value\n')

        with open(a_py, 'w') as f:
            f.write('value = 1\n')


        # The dependencies for "a.py" are "a.pxd" and "a.py".
        self.assertEqual({a_pxd, a_py}, dep_tree.all_dependencies(a_py))

        # Cythonize to create a.c
        fresh_cythonize(a_py)

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        with open(a_c) as f:
            a_c_contents1 = f.read()

        with open(a_pxd, 'w') as f:
            f.write('cdef double value\n')

        fresh_cythonize(a_py)

        with open(a_c) as f:
            a_c_contents2 = f.read()


        self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
        self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
        self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
        self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)

    def test_recythonize_pyx_on_dep_pxd_change(self):
        # b.pyx cimports a; changing a.pxd must regenerate b.c.
        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_pyx = os.path.join(src_dir, 'a.pyx')
        b_pyx = os.path.join(src_dir, 'b.pyx')
        b_c = os.path.join(src_dir, 'b.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        with open(a_pxd, 'w') as f:
            f.write('cdef int value\n')

        with open(a_pyx, 'w') as f:
            f.write('value = 1\n')

        with open(b_pyx, 'w') as f:
            f.write('cimport a\n' + 'a.value = 2\n')


        # The dependencies for "b.pyx" are "a.pxd" and "b.pyx".
        self.assertEqual({a_pxd, b_pyx}, dep_tree.all_dependencies(b_pyx))


        # Cythonize to create b.c
        fresh_cythonize([a_pyx, b_pyx])

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        with open(b_c) as f:
            b_c_contents1 = f.read()

        with open(a_pxd, 'w') as f:
            f.write('cdef double value\n')

        fresh_cythonize([a_pyx, b_pyx])

        with open(b_c) as f:
            b_c_contents2 = f.read()



        self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
        self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
        self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
        self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)



    def test_recythonize_py_on_dep_pxd_change(self):
        # b.py + b.pxd (which cimports a); changing a.pxd must regenerate b.c.

        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_pyx = os.path.join(src_dir, 'a.pyx')
        b_pxd = os.path.join(src_dir, 'b.pxd')
        b_py = os.path.join(src_dir, 'b.py')
        b_c = os.path.join(src_dir, 'b.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        with open(a_pxd, 'w') as f:
            f.write('cdef int value\n')

        with open(a_pyx, 'w') as f:
            f.write('value = 1\n')

        with open(b_pxd, 'w') as f:
            f.write('cimport a\n')

        with open(b_py, 'w') as f:
            f.write('a.value = 2\n')


        # The dependencies for b.py are "a.pxd", "b.pxd" and "b.py".
        self.assertEqual({a_pxd, b_pxd, b_py}, dep_tree.all_dependencies(b_py))


        # Cythonize to create b.c
        fresh_cythonize([a_pyx, b_py])

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        with open(b_c) as f:
            b_c_contents1 = f.read()

        with open(a_pxd, 'w') as f:
            f.write('cdef double value\n')

        fresh_cythonize([a_pyx, b_py])

        with open(b_c) as f:
            b_c_contents2 = f.read()

        self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
        self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
        self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
        self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
|
||||
@ -0,0 +1,56 @@
|
||||
from Cython.Build.Dependencies import strip_string_literals
|
||||
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
class TestStripLiterals(CythonTest):
    """Tests for Dependencies.strip_string_literals().

    The helper replaces string-literal and comment contents with
    placeholder keys ("_L1_", "_L2_", ...) and returns the stripped text
    plus a mapping from key back to original content.
    """

    def t(self, before, expected):
        # Strip, check the placeholder form, then re-substitute the
        # literals and check the round-trip restores the input exactly.
        actual, literals = strip_string_literals(before, prefix="_L")
        self.assertEqual(expected, actual)
        for key, value in literals.items():
            actual = actual.replace(key, value)
        self.assertEqual(before, actual)

    def test_empty(self):
        self.t("", "")

    def test_single_quote(self):
        self.t("'x'", "'_L1_'")

    def test_double_quote(self):
        self.t('"x"', '"_L1_"')

    def test_nested_quotes(self):
        # A quote character inside the other quote style is literal content.
        self.t(""" '"' "'" """, """ '_L1_' "_L2_" """)

    def test_triple_quote(self):
        # Triple-quoted literals may span newlines.
        self.t(" '''a\n''' ", " '''_L1_''' ")

    def test_backslash(self):
        # Escaped quotes and escaped backslashes must not end the literal early.
        self.t(r"'a\'b'", "'_L1_'")
        self.t(r"'a\\'", "'_L1_'")
        self.t(r"'a\\\'b'", "'_L1_'")

    def test_unicode(self):
        # String prefixes stay outside the placeholder.
        self.t("u'abc'", "u'_L1_'")

    def test_raw(self):
        self.t(r"r'abc\\'", "r'_L1_'")

    def test_raw_unicode(self):
        self.t(r"ru'abc\\'", "ru'_L1_'")

    def test_comment(self):
        # Comment bodies are stripped too (everything after '#').
        self.t("abc # foo", "abc #_L1_")

    def test_comment_and_quote(self):
        # A quote inside a comment is part of the comment, and vice versa.
        self.t("abc # 'x'", "abc #_L1_")
        self.t("'abc#'", "'_L1_'")

    def test_include(self):
        self.t("include 'a.pxi' # something here",
               "include '_L1_' #_L2_")

    def test_extern(self):
        self.t("cdef extern from 'a.h': # comment",
               "cdef extern from '_L1_': #_L2_")
|
||||
@ -0,0 +1 @@
|
||||
# empty file
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
14
venv/lib/python3.11/site-packages/Cython/Build/__init__.py
Normal file
14
venv/lib/python3.11/site-packages/Cython/Build/__init__.py
Normal file
@ -0,0 +1,14 @@
|
||||
from .Dependencies import cythonize
|
||||
|
||||
import sys
|
||||
if sys.version_info < (3, 7):
|
||||
from .Distutils import build_ext
|
||||
del sys
|
||||
|
||||
|
||||
def __getattr__(name):
|
||||
if name == 'build_ext':
|
||||
# Lazy import, fails if distutils is not available (in Python 3.12+).
|
||||
from .Distutils import build_ext
|
||||
return build_ext
|
||||
raise AttributeError("module '%s' has no attribute '%s'" % (__name__, name))
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
820
venv/lib/python3.11/site-packages/Cython/CodeWriter.py
Normal file
820
venv/lib/python3.11/site-packages/Cython/CodeWriter.py
Normal file
@ -0,0 +1,820 @@
|
||||
"""
|
||||
Serializes a Cython code tree to Cython code. This is primarily useful for
|
||||
debugging and testing purposes.
|
||||
The output is in a strict format, no whitespace or comments from the input
|
||||
is preserved (and it could not be as it is not present in the code tree).
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .Compiler.Visitor import TreeVisitor
|
||||
from .Compiler.ExprNodes import *
|
||||
from .Compiler.Nodes import CSimpleBaseTypeNode
|
||||
|
||||
|
||||
class LinesResult(object):
    """Accumulates generated text as a list of completed lines plus one
    pending (not yet terminated) partial line."""

    def __init__(self):
        # Completed lines, in emission order.
        self.lines = []
        # Text of the line currently being assembled.
        self.s = u""

    def put(self, s):
        """Append *s* to the current unfinished line."""
        self.s = self.s + s

    def newline(self):
        """Terminate the current line: move it into ``lines`` and reset."""
        self.lines.append(self.s)
        self.s = u""

    def putline(self, s):
        """Append *s* and terminate the line in a single step."""
        self.put(s)
        self.newline()
|
||||
|
||||
|
||||
class DeclarationWriter(TreeVisitor):
    """
    A Cython code writer that is limited to declarations nodes.

    Output is accumulated in ``self.result`` (a LinesResult by default);
    indentation is tracked as a count of 4-space units in ``numindents``.
    """

    indent_string = u"    "

    def __init__(self, result=None):
        super(DeclarationWriter, self).__init__()
        if result is None:
            result = LinesResult()
        self.result = result
        self.numindents = 0
        # Used by StatementWriter's TempsBlock handling; kept here so the
        # state lives with the shared output machinery.
        self.tempnames = {}
        self.tempblockindex = 0

    def write(self, tree):
        """Serialize *tree* and return the result accumulator."""
        self.visit(tree)
        return self.result

    def indent(self):
        self.numindents += 1

    def dedent(self):
        self.numindents -= 1

    def startline(self, s=u""):
        # Begin a line: current indentation plus optional leading text.
        self.result.put(self.indent_string * self.numindents + s)

    def put(self, s):
        # Append text to the current line without indentation.
        self.result.put(s)

    def putline(self, s):
        # Emit a complete, indented line.
        self.result.putline(self.indent_string * self.numindents + s)

    def endline(self, s=u""):
        # Terminate the current line, optionally appending *s* first.
        self.result.putline(s)

    def line(self, s):
        self.startline(s)
        self.endline()

    def comma_separated_list(self, items, output_rhs=False):
        # Visit items joined by ", "; with output_rhs, also emit
        # " = <default>" for items that carry a default value.
        if len(items) > 0:
            for item in items[:-1]:
                self.visit(item)
                if output_rhs and item.default is not None:
                    self.put(u" = ")
                    self.visit(item.default)
                self.put(u", ")
            self.visit(items[-1])
            if output_rhs and items[-1].default is not None:
                self.put(u" = ")
                self.visit(items[-1].default)

    def _visit_indented(self, node):
        # Visit *node* one indentation level deeper (used for suite bodies).
        self.indent()
        self.visit(node)
        self.dedent()

    def visit_Node(self, node):
        # Fallback: any node type without an explicit handler is a bug.
        raise AssertionError("Node not handled by serializer: %r" % node)

    def visit_ModuleNode(self, node):
        self.visitchildren(node)

    def visit_StatListNode(self, node):
        self.visitchildren(node)

    def visit_CDefExternNode(self, node):
        # 'cdef extern from *' when no include file is given.
        if node.include_file is None:
            file = u'*'
        else:
            file = u'"%s"' % node.include_file
        self.putline(u"cdef extern from %s:" % file)
        self._visit_indented(node.body)

    def visit_CPtrDeclaratorNode(self, node):
        self.put('*')
        self.visit(node.base)

    def visit_CReferenceDeclaratorNode(self, node):
        self.put('&')
        self.visit(node.base)

    def visit_CArrayDeclaratorNode(self, node):
        self.visit(node.base)
        self.put(u'[')
        if node.dimension is not None:
            self.visit(node.dimension)
        self.put(u']')

    def visit_CFuncDeclaratorNode(self, node):
        # TODO: except, gil, etc.
        self.visit(node.base)
        self.put(u'(')
        self.comma_separated_list(node.args)
        self.endline(u')')

    def visit_CNameDeclaratorNode(self, node):
        self.put(node.name)

    def visit_CSimpleBaseTypeNode(self, node):
        # See Parsing.p_sign_and_longness
        if node.is_basic_c_type:
            # signed is an index: 0 -> unsigned, 1 -> (default), 2 -> signed
            self.put(("unsigned ", "", "signed ")[node.signed])
            if node.longness < 0:
                self.put("short " * -node.longness)
            elif node.longness > 0:
                self.put("long " * node.longness)
        if node.name is not None:
            self.put(node.name)

    def visit_CComplexBaseTypeNode(self, node):
        self.visit(node.base_type)
        self.visit(node.declarator)

    def visit_CNestedBaseTypeNode(self, node):
        # Qualified type access: base.name
        self.visit(node.base_type)
        self.put(u'.')
        self.put(node.name)

    def visit_TemplatedTypeNode(self, node):
        # e.g. vector[int] — template args in square brackets.
        self.visit(node.base_type_node)
        self.put(u'[')
        self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs)
        self.put(u']')

    def visit_CVarDefNode(self, node):
        self.startline(u"cdef ")
        self.visit(node.base_type)
        self.put(u" ")
        self.comma_separated_list(node.declarators, output_rhs=True)
        self.endline()

    def _visit_container_node(self, node, decl, extras, attributes):
        # Shared emitter for struct/union/cppclass/enum containers:
        # "<decl> <name> ["cname"] [extras]:" then the indented members.
        # TODO: visibility
        self.startline(decl)
        if node.name:
            self.put(u' ')
            self.put(node.name)
            if node.cname is not None:
                self.put(u' "%s"' % node.cname)
        if extras:
            self.put(extras)
        self.endline(':')
        self.indent()
        if not attributes:
            self.putline('pass')
        else:
            for attribute in attributes:
                self.visit(attribute)
        self.dedent()

    def visit_CStructOrUnionDefNode(self, node):
        if node.typedef_flag:
            decl = u'ctypedef '
        else:
            decl = u'cdef '
        if node.visibility == 'public':
            decl += u'public '
        if node.packed:
            decl += u'packed '
        decl += node.kind
        self._visit_container_node(node, decl, None, node.attributes)

    def visit_CppClassNode(self, node):
        extras = ""
        if node.templates:
            extras = u"[%s]" % ", ".join(node.templates)
        if node.base_classes:
            extras += "(%s)" % ", ".join(node.base_classes)
        self._visit_container_node(node, u"cdef cppclass", extras, node.attributes)

    def visit_CEnumDefNode(self, node):
        self._visit_container_node(node, u"cdef enum", None, node.items)

    def visit_CEnumDefItemNode(self, node):
        self.startline(node.name)
        if node.cname:
            self.put(u' "%s"' % node.cname)
        if node.value:
            self.put(u" = ")
            self.visit(node.value)
        self.endline()

    def visit_CClassDefNode(self, node):
        assert not node.module_name
        if node.decorators:
            for decorator in node.decorators:
                self.visit(decorator)
        self.startline(u"cdef class ")
        self.put(node.class_name)
        if node.base_class_name:
            self.put(u"(")
            if node.base_class_module:
                self.put(node.base_class_module)
                self.put(u".")
            self.put(node.base_class_name)
            self.put(u")")
        self.endline(u":")
        self._visit_indented(node.body)

    def visit_CTypeDefNode(self, node):
        self.startline(u"ctypedef ")
        self.visit(node.base_type)
        self.put(u" ")
        self.visit(node.declarator)
        self.endline()

    def visit_FuncDefNode(self, node):
        # TODO: support cdef + cpdef functions
        self.startline(u"def %s(" % node.name)
        self.comma_separated_list(node.args)
        self.endline(u"):")
        self._visit_indented(node.body)

    def visit_CFuncDefNode(self, node):
        self.startline(u'cpdef ' if node.overridable else u'cdef ')
        if node.modifiers:
            self.put(' '.join(node.modifiers))
            self.put(' ')
        if node.visibility != 'private':
            self.put(node.visibility)
            self.put(u' ')
        if node.api:
            self.put(u'api ')

        if node.base_type:
            self.visit(node.base_type)
            if node.base_type.name is not None:
                self.put(u' ')

        # visit the CFuncDeclaratorNode, but put a `:` at the end of line
        self.visit(node.declarator.base)
        self.put(u'(')
        self.comma_separated_list(node.declarator.args)
        self.endline(u'):')

        self._visit_indented(node.body)

    def visit_CArgDeclNode(self, node):
        # For "CSimpleBaseTypeNode", the variable type may have been parsed as type.
        # For other node types, the "name" is always None.
        if not isinstance(node.base_type, CSimpleBaseTypeNode) or \
                node.base_type.name is not None:
            self.visit(node.base_type)

            # If we printed something for "node.base_type", we may need to print an extra ' '.
            #
            # Special case: if "node.declarator" is a "CNameDeclaratorNode",
            # its "name" might be an empty string, for example, for "cdef f(x)".
            if node.declarator.declared_name():
                self.put(u" ")
        self.visit(node.declarator)
        if node.default is not None:
            self.put(u" = ")
            self.visit(node.default)

    def visit_CImportStatNode(self, node):
        self.startline(u"cimport ")
        self.put(node.module_name)
        if node.as_name:
            self.put(u" as ")
            self.put(node.as_name)
        self.endline()

    def visit_FromCImportStatNode(self, node):
        self.startline(u"from ")
        self.put(node.module_name)
        self.put(u" cimport ")
        first = True
        for pos, name, as_name, kind in node.imported_names:
            assert kind is None
            if first:
                first = False
            else:
                self.put(u", ")
            self.put(name)
            if as_name:
                self.put(u" as ")
                self.put(as_name)
        self.endline()

    def visit_NameNode(self, node):
        self.put(node.name)

    def visit_DecoratorNode(self, node):
        self.startline("@")
        self.visit(node.decorator)
        self.endline()

    def visit_PassStatNode(self, node):
        self.startline(u"pass")
        self.endline()
|
||||
|
||||
|
||||
class StatementWriter(DeclarationWriter):
    """
    A Cython code writer for most language statement features.
    """

    def visit_SingleAssignmentNode(self, node):
        self.startline()
        self.visit(node.lhs)
        self.put(u" = ")
        self.visit(node.rhs)
        self.endline()

    def visit_CascadedAssignmentNode(self, node):
        # a = b = c = rhs
        self.startline()
        for lhs in node.lhs_list:
            self.visit(lhs)
            self.put(u" = ")
        self.visit(node.rhs)
        self.endline()

    def visit_PrintStatNode(self, node):
        # Py2-style print statement; trailing comma suppresses the newline.
        self.startline(u"print ")
        self.comma_separated_list(node.arg_tuple.args)
        if not node.append_newline:
            self.put(u",")
        self.endline()

    def visit_ForInStatNode(self, node):
        self.startline(u"for ")
        # Tuple targets are emitted unparenthesized: "for a, b in ...".
        if node.target.is_sequence_constructor:
            self.comma_separated_list(node.target.args)
        else:
            self.visit(node.target)
        self.put(u" in ")
        self.visit(node.iterator.sequence)
        self.endline(u":")
        self._visit_indented(node.body)
        if node.else_clause is not None:
            self.line(u"else:")
            self._visit_indented(node.else_clause)

    def visit_IfStatNode(self, node):
        # The IfClauseNode is handled directly without a separate match
        # for clarity.
        self.startline(u"if ")
        self.visit(node.if_clauses[0].condition)
        self.endline(":")
        self._visit_indented(node.if_clauses[0].body)
        for clause in node.if_clauses[1:]:
            self.startline("elif ")
            self.visit(clause.condition)
            self.endline(":")
            self._visit_indented(clause.body)
        if node.else_clause is not None:
            self.line("else:")
            self._visit_indented(node.else_clause)

    def visit_WhileStatNode(self, node):
        self.startline(u"while ")
        self.visit(node.condition)
        self.endline(u":")
        self._visit_indented(node.body)
        if node.else_clause is not None:
            self.line("else:")
            self._visit_indented(node.else_clause)

    def visit_ContinueStatNode(self, node):
        self.line(u"continue")

    def visit_BreakStatNode(self, node):
        self.line(u"break")

    def visit_SequenceNode(self, node):
        self.comma_separated_list(node.args)  # Might need to discover whether we need () around tuples...hmm...

    def visit_ExprStatNode(self, node):
        self.startline()
        self.visit(node.expr)
        self.endline()

    def visit_InPlaceAssignmentNode(self, node):
        # lhs <op>= rhs
        self.startline()
        self.visit(node.lhs)
        self.put(u" %s= " % node.operator)
        self.visit(node.rhs)
        self.endline()

    def visit_WithStatNode(self, node):
        self.startline()
        self.put(u"with ")
        self.visit(node.manager)
        if node.target is not None:
            self.put(u" as ")
            self.visit(node.target)
        self.endline(u":")
        self._visit_indented(node.body)

    def visit_TryFinallyStatNode(self, node):
        self.line(u"try:")
        self._visit_indented(node.body)
        self.line(u"finally:")
        self._visit_indented(node.finally_clause)

    def visit_TryExceptStatNode(self, node):
        self.line(u"try:")
        self._visit_indented(node.body)
        for x in node.except_clauses:
            self.visit(x)
        if node.else_clause is not None:
            self.visit(node.else_clause)

    def visit_ExceptClauseNode(self, node):
        # "except [pattern[, target]]:" — Py2-style comma form for the target.
        self.startline(u"except")
        if node.pattern is not None:
            self.put(u" ")
            self.visit(node.pattern)
        if node.target is not None:
            self.put(u", ")
            self.visit(node.target)
        self.endline(":")
        self._visit_indented(node.body)

    def visit_ReturnStatNode(self, node):
        self.startline("return")
        if node.value is not None:
            self.put(u" ")
            self.visit(node.value)
        self.endline()

    def visit_ReraiseStatNode(self, node):
        # Bare "raise" re-raising the active exception.
        self.line("raise")

    def visit_ImportNode(self, node):
        # Imports are serialized in a debug form, not as real syntax.
        self.put(u"(import %s)" % node.module_name.value)

    def visit_TempsBlockNode(self, node):
        """
        Temporaries are output like $1_1, where the first number is
        an index of the TempsBlockNode and the second number is an index
        of the temporary which that block allocates.
        """
        idx = 0
        for handle in node.temps:
            self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx)
            idx += 1
        self.tempblockindex += 1
        self.visit(node.body)

    def visit_TempRefNode(self, node):
        self.put(self.tempnames[node.handle])
|
||||
|
||||
|
||||
class ExpressionWriter(TreeVisitor):
    """
    A Cython code writer that is intentionally limited to expressions.

    Serialises an expression subtree back to source text via write();
    the text accumulates in self.result.
    """

    def __init__(self, result=None):
        super(ExpressionWriter, self).__init__()
        if result is None:
            result = u""
        # Accumulated output text.
        self.result = result
        # Stack of precedences of the enclosing operators; 0 is the
        # "no enclosing operator" sentinel that never forces parentheses.
        self.precedence = [0]

    def write(self, tree):
        # Serialise 'tree' and return the accumulated text.
        self.visit(tree)
        return self.result

    def put(self, s):
        self.result += s

    def remove(self, s):
        # Drop 's' from the end of the output, if present (used to strip
        # trailing ", " separators left by argument emitters).
        if self.result.endswith(s):
            self.result = self.result[:-len(s)]

    def comma_separated_list(self, items):
        if len(items) > 0:
            for item in items[:-1]:
                self.visit(item)
                self.put(u", ")
            self.visit(items[-1])

    def visit_Node(self, node):
        # Fallback: reaching this means a node type lacks a handler here.
        raise AssertionError("Node not handled by serializer: %r" % node)

    def visit_IntNode(self, node):
        self.put(node.value)

    def visit_FloatNode(self, node):
        self.put(node.value)

    def visit_NoneNode(self, node):
        self.put(u"None")

    def visit_NameNode(self, node):
        self.put(node.name)

    def visit_EllipsisNode(self, node):
        self.put(u"...")

    def visit_BoolNode(self, node):
        self.put(str(node.value))

    def visit_ConstNode(self, node):
        self.put(str(node.value))

    def visit_ImagNode(self, node):
        self.put(node.value)
        self.put(u"j")

    def emit_string(self, node, prefix=u""):
        # repr() yields a quoted, escaped literal; strip any 'u'/'b' prefix
        # repr may have added so that 'prefix' alone controls the kind.
        repr_val = repr(node.value)
        if repr_val[0] in 'ub':
            repr_val = repr_val[1:]
        self.put(u"%s%s" % (prefix, repr_val))

    def visit_BytesNode(self, node):
        self.emit_string(node, u"b")

    def visit_StringNode(self, node):
        self.emit_string(node)

    def visit_UnicodeNode(self, node):
        self.emit_string(node, u"u")

    def emit_sequence(self, node, parens=(u"", u"")):
        open_paren, close_paren = parens
        items = node.subexpr_nodes()
        self.put(open_paren)
        self.comma_separated_list(items)
        self.put(close_paren)

    def visit_ListNode(self, node):
        self.emit_sequence(node, u"[]")

    def visit_TupleNode(self, node):
        self.emit_sequence(node, u"()")

    def visit_SetNode(self, node):
        if len(node.subexpr_nodes()) > 0:
            self.emit_sequence(node, u"{}")
        else:
            # There is no literal syntax for an empty set.
            self.put(u"set()")

    def visit_DictNode(self, node):
        self.emit_sequence(node, u"{}")

    def visit_DictItemNode(self, node):
        self.visit(node.key)
        self.put(u": ")
        self.visit(node.value)

    # Operator precedence tables (higher binds tighter); consulted by
    # operator_enter()/operator_exit() to decide where parentheses are
    # required when operators nest.
    unop_precedence = {
        'not': 3, '!': 3,
        '+': 11, '-': 11, '~': 11,
    }
    binop_precedence = {
        'or': 1,
        'and': 2,
        # unary: 'not': 3, '!': 3,
        'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
        '|': 5,
        '^': 6,
        '&': 7,
        '<<': 8, '>>': 8,
        '+': 9, '-': 9,
        '*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
        # unary: '+': 11, '-': 11, '~': 11
        '**': 12,
    }

    def operator_enter(self, new_prec):
        # Open a parenthesis if the enclosing operator binds tighter than
        # the one we are about to emit.
        old_prec = self.precedence[-1]
        if old_prec > new_prec:
            self.put(u"(")
        self.precedence.append(new_prec)

    def operator_exit(self):
        # Close the parenthesis opened by the matching operator_enter().
        old_prec, new_prec = self.precedence[-2:]
        if old_prec > new_prec:
            self.put(u")")
        self.precedence.pop()

    def visit_NotNode(self, node):
        op = 'not'
        prec = self.unop_precedence[op]
        self.operator_enter(prec)
        self.put(u"not ")
        self.visit(node.operand)
        self.operator_exit()

    def visit_UnopNode(self, node):
        op = node.operator
        prec = self.unop_precedence[op]
        self.operator_enter(prec)
        self.put(u"%s" % node.operator)
        self.visit(node.operand)
        self.operator_exit()

    def visit_BinopNode(self, node):
        op = node.operator
        # Unknown operators fall back to precedence 0, i.e. they are always
        # parenthesised inside another operator.
        prec = self.binop_precedence.get(op, 0)
        self.operator_enter(prec)
        self.visit(node.operand1)
        # Multi-word operators are stored with '_' (e.g. 'is_not', 'not_in').
        self.put(u" %s " % op.replace('_', ' '))
        self.visit(node.operand2)
        self.operator_exit()

    def visit_BoolBinopNode(self, node):
        self.visit_BinopNode(node)

    def visit_PrimaryCmpNode(self, node):
        self.visit_BinopNode(node)

    def visit_IndexNode(self, node):
        self.visit(node.base)
        self.put(u"[")
        if isinstance(node.index, TupleNode):
            if node.index.subexpr_nodes():
                # x[a, b] - emit the tuple items without parentheses.
                self.emit_sequence(node.index)
            else:
                # An empty tuple index needs explicit parentheses: x[()].
                self.put(u"()")
        else:
            self.visit(node.index)
        self.put(u"]")

    def visit_SliceIndexNode(self, node):
        self.visit(node.base)
        self.put(u"[")
        if node.start:
            self.visit(node.start)
        self.put(u":")
        if node.stop:
            self.visit(node.stop)
        if node.slice:
            self.put(u":")
            self.visit(node.slice)
        self.put(u"]")

    def visit_SliceNode(self, node):
        # A slice object as it appears inside a subscript; omitted bounds
        # are represented by is_none child nodes.
        if not node.start.is_none:
            self.visit(node.start)
        self.put(u":")
        if not node.stop.is_none:
            self.visit(node.stop)
        if not node.step.is_none:
            self.put(u":")
            self.visit(node.step)

    def visit_CondExprNode(self, node):
        self.visit(node.true_val)
        self.put(u" if ")
        self.visit(node.test)
        self.put(u" else ")
        self.visit(node.false_val)

    def visit_AttributeNode(self, node):
        self.visit(node.obj)
        self.put(u".%s" % node.attribute)

    def visit_SimpleCallNode(self, node):
        self.visit(node.function)
        self.put(u"(")
        self.comma_separated_list(node.args)
        self.put(")")

    def emit_pos_args(self, node):
        # Positional arguments of a general call.  Every emitted argument is
        # followed by ", "; visit_GeneralCallNode strips the final separator.
        if node is None:
            return
        if isinstance(node, AddNode):
            # 'args + more_args': flatten both sides.
            self.emit_pos_args(node.operand1)
            self.emit_pos_args(node.operand2)
        elif isinstance(node, TupleNode):
            for expr in node.subexpr_nodes():
                self.visit(expr)
                self.put(u", ")
        elif isinstance(node, AsTupleNode):
            # *args expansion.
            self.put("*")
            self.visit(node.arg)
            self.put(u", ")
        else:
            self.visit(node)
            self.put(u", ")

    def emit_kwd_args(self, node):
        # Keyword arguments of a general call; same trailing ", " protocol
        # as emit_pos_args().
        if node is None:
            return
        if isinstance(node, MergedDictNode):
            for expr in node.subexpr_nodes():
                self.emit_kwd_args(expr)
        elif isinstance(node, DictNode):
            for expr in node.subexpr_nodes():
                self.put(u"%s=" % expr.key.value)
                self.visit(expr.value)
                self.put(u", ")
        else:
            # **kwargs expansion.
            self.put(u"**")
            self.visit(node)
            self.put(u", ")

    def visit_GeneralCallNode(self, node):
        self.visit(node.function)
        self.put(u"(")
        self.emit_pos_args(node.positional_args)
        self.emit_kwd_args(node.keyword_args)
        # Strip the trailing ", " left by the emitters above.
        self.remove(u", ")
        self.put(")")

    def emit_comprehension(self, body, target,
                           sequence, condition,
                           parens=(u"", u"")):
        open_paren, close_paren = parens
        self.put(open_paren)
        self.visit(body)
        self.put(u" for ")
        self.visit(target)
        self.put(u" in ")
        self.visit(sequence)
        if condition:
            self.put(u" if ")
            self.visit(condition)
        self.put(close_paren)

    def visit_ComprehensionAppendNode(self, node):
        self.visit(node.expr)

    def visit_DictComprehensionAppendNode(self, node):
        self.visit(node.key_expr)
        self.put(u": ")
        self.visit(node.value_expr)

    def visit_ComprehensionNode(self, node):
        # Choose brackets from the comprehension's result type.
        tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
        parens = tpmap[node.type.py_type_name()]
        body = node.loop.body
        target = node.loop.target
        sequence = node.loop.iterator.sequence
        condition = None
        if hasattr(body, 'if_clauses'):
            # type(body) is Nodes.IfStatNode
            condition = body.if_clauses[0].condition
            body = body.if_clauses[0].body
        self.emit_comprehension(body, target, sequence, condition, parens)

    def visit_GeneratorExpressionNode(self, node):
        body = node.loop.body
        target = node.loop.target
        sequence = node.loop.iterator.sequence
        condition = None
        if hasattr(body, 'if_clauses'):
            # type(body) is Nodes.IfStatNode
            condition = body.if_clauses[0].condition
            body = body.if_clauses[0].body.expr.arg
        elif hasattr(body, 'expr'):
            # type(body) is Nodes.ExprStatNode
            body = body.expr.arg
        self.emit_comprehension(body, target, sequence, condition, u"()")
class PxdWriter(DeclarationWriter, ExpressionWriter):
    """
    Serialises the subset of Cython that may appear in .pxd files.
    (currently unused)
    """

    def __call__(self, node):
        # Serialise the tree, print the resulting lines, pass the node on.
        lines = self.write(node).lines
        print(u'\n'.join(lines))
        return node

    def visit_CFuncDefNode(self, node):
        # 'cpdef' functions are Python-overridable; plain 'cdef' ones are not.
        self.startline(u'cpdef ' if node.overridable else u'cdef ')
        if node.modifiers:
            self.put(' '.join(node.modifiers))
            self.put(' ')
        if node.visibility != 'private':
            self.put(node.visibility)
            self.put(u' ')
        if node.api:
            self.put(u'api ')
        self.visit(node.declarator)

    def visit_StatNode(self, node):
        # Statements are not part of a .pxd file - ignore them.
        pass
class CodeWriter(StatementWriter, ExpressionWriter):
|
||||
"""
|
||||
A complete Cython code writer.
|
||||
"""
|
||||
@ -0,0 +1,99 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Visitor import ScopeTrackingTransform
|
||||
from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode
|
||||
from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode
|
||||
from .PyrexTypes import py_object_type
|
||||
from .StringEncoding import EncodedString
|
||||
from . import Symtab
|
||||
|
||||
class AutoTestDictTransform(ScopeTrackingTransform):
    """Implement the ``autotestdict`` directive.

    Collects the docstrings of functions and methods that contain doctests
    (or all of them, with ``autotestdict.all``) into a module-level
    ``__test__`` dict so that Python's doctest discovery finds them.
    """

    # Special methods excluded from doctest collection on cdef classes.
    excludelist = ['__cinit__', '__dealloc__', '__richcmp__',
                   '__nonzero__', '__bool__',
                   '__len__', '__contains__']

    def visit_ModuleNode(self, node):
        if node.is_pxd:
            return node
        self.scope_type = 'module'
        self.scope_node = node

        if not self.current_directives['autotestdict']:
            return node
        self.all_docstrings = self.current_directives['autotestdict.all']
        self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef']

        assert isinstance(node.body, StatListNode)

        # If the user already created __test__, leave it alone.
        if u'__test__' in node.scope.entries:
            return node

        pos = node.pos

        # self.tests is shared with the DictNode below, so entries appended
        # by add_test() during the traversal end up in the assignment.
        self.tests = []
        self.testspos = node.pos

        test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'),
                                                 py_object_type,
                                                 pos,
                                                 visibility='public')
        create_test_dict_assignment = SingleAssignmentNode(pos,
            lhs=NameNode(pos, name=EncodedString(u'__test__'),
                         entry=test_dict_entry),
            rhs=DictNode(pos, key_value_pairs=self.tests))
        self.visitchildren(node)
        node.body.stats.append(create_test_dict_assignment)
        return node

    def add_test(self, testpos, path, doctest):
        """Register one docstring as a __test__ entry keyed 'path (line N)'."""
        pos = self.testspos
        keystr = u'%s (line %d)' % (path, testpos[1])
        key = UnicodeNode(pos, value=EncodedString(keystr))
        value = UnicodeNode(pos, value=doctest)
        self.tests.append(DictItemNode(pos, key=key, value=value))

    def visit_ExprNode(self, node):
        # expressions cannot contain functions and lambda expressions
        # do not have a docstring
        return node

    def visit_FuncDefNode(self, node):
        if not node.doc or (isinstance(node, DefNode) and node.fused_py_func):
            return node
        if not self.cdef_docstrings:
            # Skip pure cdef functions unless cdef docstrings were requested.
            if isinstance(node, CFuncDefNode) and not node.py_func:
                return node
        if not self.all_docstrings and '>>>' not in node.doc:
            return node

        # Build the dotted lookup path for the doctest key.
        if self.scope_type == 'module':
            path = node.entry.name
        elif self.scope_type in ('pyclass', 'cclass'):
            if isinstance(node, CFuncDefNode):
                if node.py_func is not None:
                    name = node.py_func.name
                else:
                    name = node.entry.name
            else:
                name = node.name
            if self.scope_type == 'cclass' and name in self.excludelist:
                return node
            if self.scope_type == 'pyclass':
                class_name = self.scope_node.name
            else:
                class_name = self.scope_node.class_name
            if isinstance(node.entry.scope, Symtab.PropertyScope):
                # Property getters/setters live one scope deeper.
                path = "%s.%s.%s" % (class_name, node.entry.scope.name,
                                     node.entry.name)
            else:
                path = "%s.%s" % (class_name, node.entry.name)
        else:
            assert False
        self.add_test(node.pos, path, node.doc)
        return node
341
venv/lib/python3.11/site-packages/Cython/Compiler/Annotate.py
Normal file
341
venv/lib/python3.11/site-packages/Cython/Compiler/Annotate.py
Normal file
@ -0,0 +1,341 @@
|
||||
# Note: Work in progress
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import codecs
|
||||
import textwrap
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from collections import defaultdict
|
||||
from xml.sax.saxutils import escape as html_escape
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO # does not support writing 'str' in Py2
|
||||
|
||||
from . import Version
|
||||
from .Code import CCodeWriter
|
||||
from .. import Utils
|
||||
|
||||
|
||||
class AnnotationCCodeWriter(CCodeWriter):
    """CCodeWriter that records, per original source line, the C code it
    generates, and renders the result as an annotated HTML report.
    """

    # also used as marker for detection of complete code emission in tests
    COMPLETE_CODE_TITLE = "Complete cythonized code"

    def __init__(self, create_from=None, buffer=None, copy_formatting=True, show_entire_c_code=False, source_desc=None):
        # 'source_desc' is accepted for interface compatibility; it is not
        # used by this writer.
        CCodeWriter.__init__(self, create_from, buffer, copy_formatting=copy_formatting)
        self.show_entire_c_code = show_entire_c_code
        if create_from is None:
            self.annotation_buffer = StringIO()
            self.last_annotated_pos = None
            # annotations[filename][line] -> [(column, AnnotationItem)*]
            self.annotations = defaultdict(partial(defaultdict, list))
            # code[filename][line] -> str
            self.code = defaultdict(partial(defaultdict, str))
            # scopes[filename][line] -> set(scopes)
            self.scopes = defaultdict(partial(defaultdict, set))
        else:
            # When creating an insertion point, keep references to the same database
            self.annotation_buffer = create_from.annotation_buffer
            self.annotations = create_from.annotations
            self.code = create_from.code
            self.scopes = create_from.scopes
            self.last_annotated_pos = create_from.last_annotated_pos

    def create_new(self, create_from, buffer, copy_formatting):
        return AnnotationCCodeWriter(create_from, buffer, copy_formatting)

    def _write_to_buffer(self, s):
        # Mirror every write into the annotation buffer so mark_pos() can
        # attribute the generated C code to a source line.
        self.buffer.write(s)
        self.annotation_buffer.write(s)

    def mark_pos(self, pos, trace=True):
        # Flush C code generated since the previous marker into
        # self.code[filename][line], then remember the new position.
        if pos is not None:
            CCodeWriter.mark_pos(self, pos, trace)
            if self.funcstate and self.funcstate.scope:
                # lambdas and genexprs can result in multiple scopes per line => keep them in a set
                self.scopes[pos[0].filename][pos[1]].add(self.funcstate.scope)
        if self.last_annotated_pos:
            source_desc, line, _ = self.last_annotated_pos
            pos_code = self.code[source_desc.filename]
            pos_code[line] += self.annotation_buffer.getvalue()
        self.annotation_buffer = StringIO()
        self.last_annotated_pos = pos

    def annotate(self, pos, item):
        self.annotations[pos[0].filename][pos[1]].append((pos[2], item))

    def _css(self):
        """css template will later allow to choose a colormap"""
        css = [self._css_template]
        for i in range(255):
            color = u"FFFF%02x" % int(255.0 // (1.0 + i/10.0))
            css.append('.cython.score-%d {background-color: #%s;}' % (i, color))
        try:
            from pygments.formatters import HtmlFormatter
        except ImportError:
            # Pygments styling is optional.
            pass
        else:
            css.append(HtmlFormatter().get_style_defs('.cython'))
        return '\n'.join(css)

    _css_template = textwrap.dedent("""
        body.cython { font-family: courier; font-size: 12; }

        .cython.tag { }
        .cython.line { color: #000000; margin: 0em }
        .cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; }

        .cython.line .run { background-color: #B0FFB0; }
        .cython.line .mis { background-color: #FFB0B0; }
        .cython.code.run { border-left: 8px solid #B0FFB0; }
        .cython.code.mis { border-left: 8px solid #FFB0B0; }

        .cython.code .py_c_api { color: red; }
        .cython.code .py_macro_api { color: #FF7000; }
        .cython.code .pyx_c_api { color: #FF3000; }
        .cython.code .pyx_macro_api { color: #FF7000; }
        .cython.code .refnanny { color: #FFA000; }
        .cython.code .trace { color: #FFA000; }
        .cython.code .error_goto { color: #FFA000; }

        .cython.code .coerce { color: #008000; border: 1px dotted #008000 }
        .cython.code .py_attr { color: #FF0000; font-weight: bold; }
        .cython.code .c_attr { color: #0000FF; }
        .cython.code .py_call { color: #FF0000; font-weight: bold; }
        .cython.code .c_call { color: #0000FF; }
    """)

    # on-click toggle function to show/hide C source code
    _onclick_attr = ' onclick="{0}"'.format((
        "(function(s){"
        " s.display = s.display === 'block' ? 'none' : 'block'"
        "})(this.nextElementSibling.style)"
        ).replace(' ', '')  # poor dev's JS minification
    )

    def save_annotation(self, source_filename, target_filename, coverage_xml=None):
        with Utils.open_source_file(source_filename) as f:
            code = f.read()
        generated_code = self.code.get(source_filename, {})
        c_file = Utils.decode_filename(os.path.basename(target_filename))
        html_filename = os.path.splitext(target_filename)[0] + ".html"

        with codecs.open(html_filename, "w", encoding="UTF-8") as out_buffer:
            out_buffer.write(self._save_annotation(code, generated_code, c_file, source_filename, coverage_xml))

    def _save_annotation_header(self, c_file, source_filename, coverage_timestamp=None):
        coverage_info = ''
        if coverage_timestamp:
            # Coverage timestamps are in milliseconds since the epoch.
            coverage_info = u' with coverage data from {timestamp}'.format(
                timestamp=datetime.fromtimestamp(int(coverage_timestamp) // 1000))

        # BUG FIX: the <title> had lost its '{filename}' placeholder (the
        # format() call below already passes 'filename='); restored it.
        outlist = [
            textwrap.dedent(u'''\
            <!DOCTYPE html>
            <!-- Generated by Cython {watermark} -->
            <html>
            <head>
            <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
            <title>Cython: {filename}</title>
            <style type="text/css">
            {css}
            </style>
            </head>
            <body class="cython">
            <p><span style="border-bottom: solid 1px grey;">Generated by Cython {watermark}</span>{more_info}</p>
            <p>
            <span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
            Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
            </p>
            ''').format(css=self._css(), watermark=Version.watermark,
                        filename=os.path.basename(source_filename) if source_filename else '',
                        more_info=coverage_info)
        ]
        if c_file:
            outlist.append(u'<p>Raw output: <a href="%s">%s</a></p>\n' % (c_file, c_file))
        return outlist

    def _save_annotation_footer(self):
        return (u'</body></html>\n',)

    def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None):
        """
        code : original cython source code as one string
        generated_code : generated c code keyed by line number in original file
        c_file : filename in which the c_code has been written
        source_filename : name of the original cython source file
        coverage_xml : optional parsed coverage.py XML report
        """
        if coverage_xml is not None and source_filename:
            coverage_timestamp = coverage_xml.get('timestamp', '').strip()
            covered_lines = self._get_line_coverage(coverage_xml, source_filename)
        else:
            coverage_timestamp = covered_lines = None
        annotation_items = dict(self.annotations[source_filename])
        scopes = dict(self.scopes[source_filename])

        outlist = []
        outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp))
        outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines))
        outlist.extend(self._save_annotation_footer())
        return ''.join(outlist)

    def _get_line_coverage(self, coverage_xml, source_filename):
        """Return {line_number: hit_count} for source_filename, or None."""
        coverage_data = None
        for entry in coverage_xml.iterfind('.//class'):
            if not entry.get('filename'):
                continue
            if (entry.get('filename') == source_filename or
                    os.path.abspath(entry.get('filename')) == source_filename):
                coverage_data = entry
                break
            elif source_filename.endswith(entry.get('filename')):
                coverage_data = entry  # but we might still find a better match...
        if coverage_data is None:
            return None
        return dict(
            (int(line.get('number')), int(line.get('hits')))
            for line in coverage_data.iterfind('lines/line')
        )

    def _htmlify_code(self, code, language):
        try:
            from pygments import highlight
            from pygments.lexers import CythonLexer, CppLexer
            from pygments.formatters import HtmlFormatter
        except ImportError:
            # no Pygments, just escape the code
            return html_escape(code)

        if language == "cython":
            lexer = CythonLexer(stripnl=False, stripall=False)
        elif language == "c/cpp":
            lexer = CppLexer(stripnl=False, stripall=False)
        else:
            # unknown language, use fallback
            return html_escape(code)
        html_code = highlight(
            code, lexer,
            HtmlFormatter(nowrap=True))
        return html_code

    def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None):
        outlist = [u'<div class="cython">']
        pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
        # '.copy' (a bound method, deliberately not called here) serves as a
        # factory producing a fresh zeroed call-counter dict per source line.
        new_calls_map = dict(
            (name, 0) for name in
            'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split()
        ).copy

        self.mark_pos(None)

        def annotate(match):
            # re.sub callback: wrap the matched API call in a classed span
            # and count it towards this line's score.
            group_name = match.lastgroup
            calls[group_name] += 1
            return u"<span class='%s'>%s</span>" % (
                group_name, match.group(group_name))

        lines = self._htmlify_code(cython_code, "cython").splitlines()
        lineno_width = len(str(len(lines)))
        if not covered_lines:
            covered_lines = None

        for k, line in enumerate(lines, 1):
            try:
                c_code = generated_code[k]
            except KeyError:
                c_code = ''
            else:
                c_code = _replace_pos_comment(pos_comment_marker, c_code)
                if c_code.startswith(pos_comment_marker):
                    c_code = c_code[len(pos_comment_marker):]
                c_code = html_escape(c_code)

            calls = new_calls_map()
            c_code = _parse_code(annotate, c_code)
            # Weighted "Python interaction" score colouring the line.
            score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
                     calls['py_macro_api'] + calls['pyx_macro_api'])

            if c_code:
                onclick = self._onclick_attr
                expandsymbol = '+'
            else:
                onclick = ''
                expandsymbol = ' '

            covered = ''
            if covered_lines is not None and k in covered_lines:
                hits = covered_lines[k]
                if hits is not None:
                    covered = 'run' if hits else 'mis'

            outlist.append(
                u'<pre class="cython line score-{score}"{onclick}>'
                # generate line number with expand symbol in front,
                # and the right number of digit
                u'{expandsymbol}<span class="{covered}">{line:0{lineno_width}d}</span>: {code}</pre>\n'.format(
                    score=score,
                    expandsymbol=expandsymbol,
                    covered=covered,
                    lineno_width=lineno_width,
                    line=k,
                    code=line.rstrip(),
                    onclick=onclick,
                ))
            if c_code:
                outlist.append(u"<pre class='cython code score-{score} {covered}'>{code}</pre>".format(
                    score=score, covered=covered, code=c_code))
        outlist.append(u"</div>")

        # now the whole c-code if needed:
        if self.show_entire_c_code:
            outlist.append(u'<p><div class="cython">')
            onclick_title = u"<pre class='cython line'{onclick}>+ {title}</pre>\n"
            outlist.append(onclick_title.format(
                onclick=self._onclick_attr,
                title=AnnotationCCodeWriter.COMPLETE_CODE_TITLE,
            ))
            complete_code_as_html = self._htmlify_code(self.buffer.getvalue(), "c/cpp")
            outlist.append(u"<pre class='cython code'>{code}</pre>".format(code=complete_code_as_html))
            outlist.append(u"</div></p>")

        return outlist
# Pre-bound ``Pattern.sub`` that wraps recognised C-API call names in the
# generated C code with CSS-classed <span>s (see _save_annotation_body).
# Group names double as both CSS classes and per-line call-counter keys.
_parse_code = re.compile((
    br'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
    br'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
    br'(?:'
    br'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
    br'(?P<pyx_c_api>(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|'
    br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
    br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
    br')(?=\()|'  # look-ahead to exclude subsequent '(' from replacement
    br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
).decode('ascii')).sub
# Pre-bound ``Pattern.sub`` that strips Cython's multi-line position-marker
# comments from generated C code before it is displayed.
_replace_pos_comment = re.compile(
    # this matches what Cython generates as code line marker comment
    br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'),
    re.M
).sub
|
||||
|
||||
class AnnotationItem(object):
    """One annotated source region: an opening HTML span plus its extent."""

    def __init__(self, style, text, tag="", size=0):
        # Presentation attributes are stored verbatim; no escaping here.
        self.style = style
        self.text = text
        self.tag = tag
        self.size = size

    def start(self):
        # Opening markup; the matching close comes from end().
        return u"<span class='cython tag %(style)s' title='%(text)s'>%(tag)s" % vars(self)

    def end(self):
        # (extent in characters, closing markup) pair.
        return (self.size, u"</span>")
@ -0,0 +1,318 @@
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .Visitor import CythonTransform
|
||||
from .StringEncoding import EncodedString
|
||||
from . import Options
|
||||
from . import PyrexTypes
|
||||
from ..CodeWriter import ExpressionWriter
|
||||
from .Errors import warning
|
||||
|
||||
|
||||
class AnnotationWriter(ExpressionWriter):
    """
    Serialises Python expressions that appear in argument/variable
    annotations back to source text.
    """
    def __init__(self, description=None):
        """description is optional. If specified it is used in
        warning messages for the nodes that don't convert to string properly.
        If not specified then no messages are generated.
        """
        super(AnnotationWriter, self).__init__()
        self.description = description
        self.incomplete = False

    def _unsupported(self, node, placeholder, message):
        # Shared fallback: emit a placeholder, mark the result incomplete,
        # and warn if a description was given.
        self.put(placeholder)
        self.incomplete = True
        if self.description:
            warning(node.pos, message.format(self.description), level=1)

    def visit_Node(self, node):
        self._unsupported(
            node, u"<???>",
            "Failed to convert code to string representation in {0}")

    def visit_LambdaNode(self, node):
        # XXX Should we do better?
        self._unsupported(
            node, "<lambda>",
            "Failed to convert lambda to string representation in {0}")

    def visit_UnicodeNode(self, node):
        # Discard Unicode prefix in annotations. Any tool looking at them
        # would probably expect Py3 string semantics.
        self.emit_string(node, "")

    def visit_AnnotationNode(self, node):
        self.put(node.string.unicode_value)
class EmbedSignature(CythonTransform):
|
||||
|
||||
def __init__(self, context):
    super(EmbedSignature, self).__init__(context)
    # Name and node of the class currently being visited (None at module
    # level); maintained by visit_ClassDefNode().
    self.class_name = None
    self.class_node = None
def _fmt_expr(self, node):
    # Serialise an expression node back to Python source text.
    return ExpressionWriter().write(node)
def _fmt_annotation(self, node):
    # Serialise an annotation expression; unsupported nodes degrade to
    # placeholders inside AnnotationWriter.
    return AnnotationWriter().write(node)
def _setup_format(self):
|
||||
signature_format = self.current_directives['embedsignature.format']
|
||||
self.is_format_c = signature_format == 'c'
|
||||
self.is_format_python = signature_format == 'python'
|
||||
self.is_format_clinic = signature_format == 'clinic'
|
||||
|
||||
def _fmt_arg(self, arg):
|
||||
arg_doc = arg.name
|
||||
annotation = None
|
||||
defaultval = None
|
||||
if arg.is_self_arg:
|
||||
if self.is_format_clinic:
|
||||
arg_doc = '$self'
|
||||
elif arg.is_type_arg:
|
||||
if self.is_format_clinic:
|
||||
arg_doc = '$type'
|
||||
elif self.is_format_c:
|
||||
if arg.type is not PyrexTypes.py_object_type:
|
||||
arg_doc = arg.type.declaration_code(arg.name, for_display=1)
|
||||
elif self.is_format_python:
|
||||
if not arg.annotation:
|
||||
annotation = self._fmt_type(arg.type)
|
||||
if arg.annotation:
|
||||
if not self.is_format_clinic:
|
||||
annotation = self._fmt_annotation(arg.annotation)
|
||||
if arg.default:
|
||||
defaultval = self._fmt_expr(arg.default)
|
||||
if annotation:
|
||||
arg_doc = arg_doc + (': %s' % annotation)
|
||||
if defaultval:
|
||||
arg_doc = arg_doc + (' = %s' % defaultval)
|
||||
elif defaultval:
|
||||
arg_doc = arg_doc + ('=%s' % defaultval)
|
||||
return arg_doc
|
||||
|
||||
def _fmt_star_arg(self, arg):
|
||||
arg_doc = arg.name
|
||||
if arg.annotation:
|
||||
if not self.is_format_clinic:
|
||||
annotation = self._fmt_annotation(arg.annotation)
|
||||
arg_doc = arg_doc + (': %s' % annotation)
|
||||
return arg_doc
|
||||
|
||||
def _fmt_arglist(self, args,
|
||||
npoargs=0, npargs=0, pargs=None,
|
||||
nkargs=0, kargs=None,
|
||||
hide_self=False):
|
||||
arglist = []
|
||||
for arg in args:
|
||||
if not hide_self or not arg.entry.is_self_arg:
|
||||
arg_doc = self._fmt_arg(arg)
|
||||
arglist.append(arg_doc)
|
||||
if pargs:
|
||||
arg_doc = self._fmt_star_arg(pargs)
|
||||
arglist.insert(npargs + npoargs, '*%s' % arg_doc)
|
||||
elif nkargs:
|
||||
arglist.insert(npargs + npoargs, '*')
|
||||
if npoargs:
|
||||
arglist.insert(npoargs, '/')
|
||||
if kargs:
|
||||
arg_doc = self._fmt_star_arg(kargs)
|
||||
arglist.append('**%s' % arg_doc)
|
||||
return arglist
|
||||
|
||||
def _fmt_type(self, type):
    # Render a declared type for use in a signature; returns None for plain
    # 'object' (no annotation needed).
    if type is PyrexTypes.py_object_type:
        return None
    elif self.is_format_c:
        code = type.declaration_code("", for_display=1)
        return code
    elif self.is_format_python:
        annotation = None
        if type.is_string:
            # String types display as the configured c_string_type directive.
            annotation = self.current_directives['c_string_type']
        elif type.is_numeric:
            annotation = type.py_type_name()
        if annotation is None:
            # Fall back to an identifier-safe form of the C declaration.
            code = type.declaration_code('', for_display=1)
            annotation = code.replace(' ', '_').replace('*', 'p')
        return annotation
    return None
def _fmt_signature(self, cls_name, func_name, args,
|
||||
npoargs=0, npargs=0, pargs=None,
|
||||
nkargs=0, kargs=None,
|
||||
return_expr=None, return_type=None,
|
||||
hide_self=False):
|
||||
arglist = self._fmt_arglist(
|
||||
args, npoargs, npargs, pargs, nkargs, kargs,
|
||||
hide_self=hide_self,
|
||||
)
|
||||
arglist_doc = ', '.join(arglist)
|
||||
func_doc = '%s(%s)' % (func_name, arglist_doc)
|
||||
if self.is_format_c and cls_name:
|
||||
func_doc = '%s.%s' % (cls_name, func_doc)
|
||||
if not self.is_format_clinic:
|
||||
ret_doc = None
|
||||
if return_expr:
|
||||
ret_doc = self._fmt_annotation(return_expr)
|
||||
elif return_type:
|
||||
ret_doc = self._fmt_type(return_type)
|
||||
if ret_doc:
|
||||
func_doc = '%s -> %s' % (func_doc, ret_doc)
|
||||
return func_doc
|
||||
|
||||
def _embed_signature(self, signature, node_doc):
|
||||
if self.is_format_clinic and self.current_directives['binding']:
|
||||
return node_doc
|
||||
if node_doc:
|
||||
if self.is_format_clinic:
|
||||
docfmt = "%s\n--\n\n%s"
|
||||
else:
|
||||
docfmt = "%s\n%s"
|
||||
return docfmt % (signature, node_doc)
|
||||
else:
|
||||
if self.is_format_clinic:
|
||||
docfmt = "%s\n--\n\n"
|
||||
else:
|
||||
docfmt = "%s"
|
||||
return docfmt % signature
|
||||
|
||||
def __call__(self, node):
    """Run the transform only when docstrings are being generated at all."""
    if Options.docstrings:
        return super(EmbedSignature, self).__call__(node)
    return node
|
||||
|
||||
def visit_ClassDefNode(self, node):
    """Track the enclosing class name/node while visiting the class body."""
    saved = (self.class_name, self.class_node)
    self.class_node = node
    try:
        # PyClassDefNode carries its name as .name ...
        self.class_name = node.name
    except AttributeError:
        # ... while CClassDefNode uses .class_name instead.
        self.class_name = node.class_name
    self.visitchildren(node)
    self.class_name, self.class_node = saved
    return node
|
||||
|
||||
def visit_LambdaNode(self, node):
    """Skip lambdas: they carry no signature and contain no inner defs."""
    return node
|
||||
|
||||
def visit_DefNode(self, node):
    """Embed the formatted signature into a def's docstring.

    Special methods are skipped except ``__init__``, whose signature is
    attached to the enclosing class (its type scope) in C format.
    """
    if not self.current_directives['embedsignature']:
        return node
    self._setup_format()

    is_constructor = False
    hide_self = False
    if node.entry.is_special:
        # Among special methods only __init__ gets a signature.
        is_constructor = self.class_node and node.name == '__init__'
        if not is_constructor:
            return node
        class_name, func_name = None, node.name
        if self.is_format_c:
            # C format documents the constructor under the class name,
            # with the implicit self argument hidden.
            func_name = self.class_name
            hide_self = True
    else:
        class_name, func_name = self.class_name, node.name

    npoargs = getattr(node, 'num_posonly_args', 0)
    nkargs = getattr(node, 'num_kwonly_args', 0)
    npargs = len(node.args) - nkargs - npoargs
    signature = self._fmt_signature(
        class_name, func_name, node.args,
        npoargs, npargs, node.star_arg,
        nkargs, node.starstar_arg,
        return_expr=node.return_type_annotation,
        return_type=None, hide_self=hide_self)
    if not signature:
        return node

    if is_constructor and self.is_format_c:
        # The constructor signature becomes the class docstring.
        doc_holder = self.class_node.entry.type.scope
    else:
        doc_holder = node.entry

    # A cpdef-style Python wrapper may hold the original docstring.
    py_func = None if is_constructor else getattr(node, 'py_func', None)
    if doc_holder.doc is not None:
        old_doc = doc_holder.doc
    elif py_func is not None:
        old_doc = py_func.entry.doc
    else:
        old_doc = None
    new_doc = EncodedString(self._embed_signature(signature, old_doc))
    doc_holder.doc = new_doc
    if py_func is not None:
        py_func.entry.doc = new_doc
    return node
|
||||
|
||||
def visit_CFuncDefNode(self, node):
    """Embed the formatted signature into a cpdef function's docstring."""
    if not node.overridable:
        # plain cdef function: not callable from Python, nothing to document
        return node
    if not self.current_directives['embedsignature']:
        return node
    self._setup_format()

    signature = self._fmt_signature(
        self.class_name, node.declarator.base.name,
        node.declarator.args,
        return_type=node.return_type)
    if not signature:
        return node

    py_func = getattr(node, 'py_func', None)
    if node.entry.doc is not None:
        old_doc = node.entry.doc
    elif py_func is not None:
        old_doc = py_func.entry.doc
    else:
        old_doc = None
    new_doc = EncodedString(self._embed_signature(signature, old_doc))
    node.entry.doc = new_doc
    if py_func is not None:
        py_func.entry.doc = new_doc
    return node
|
||||
|
||||
def visit_PropertyNode(self, node):
    """Embed ``name: type`` into a property's docstring when a type is known.

    The type comes either from the entry type (public attributes) or from
    the return annotation of the property's ``__get__`` method.
    """
    if not self.current_directives['embedsignature']:
        return node
    self._setup_format()

    entry = node.entry
    prop_name = entry.name
    type_name = None
    if entry.visibility == 'public':
        if self.is_format_c:
            # property synthesised from a cdef public attribute
            type_name = entry.type.declaration_code("", for_display=1)
            if not entry.type.is_pyobject:
                type_name = "'%s'" % type_name
            elif entry.type.is_extension_type:
                type_name = entry.type.module_name + '.' + type_name
        elif self.is_format_python:
            type_name = self._fmt_type(entry.type)
    if type_name is None:
        # Fall back to the __get__ method's return annotation, if any.
        for stat in node.body.stats:
            if stat.name != '__get__':
                continue
            if self.is_format_c:
                prop_name = '%s.%s' % (self.class_name, prop_name)
            ret_annotation = stat.return_type_annotation
            if ret_annotation:
                type_name = self._fmt_annotation(ret_annotation)
    if type_name is not None:
        signature = '%s: %s' % (prop_name, type_name)
        new_doc = self._embed_signature(signature, entry.doc)
        if not self.is_format_clinic:
            entry.doc = EncodedString(new_doc)
    return node
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user