first commit
This commit is contained in:
commit
417e54da96
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
.buildozer
|
BIN
bin/myapp-0.1-arm64-v8a_armeabi-v7a-debug.apk
Normal file
BIN
bin/myapp-0.1-arm64-v8a_armeabi-v7a-debug.apk
Normal file
Binary file not shown.
452
buildozer.spec
Normal file
452
buildozer.spec
Normal file
@ -0,0 +1,452 @@
|
||||
[app]
|
||||
|
||||
# (str) Title of your application
|
||||
title = My Application
|
||||
|
||||
# (str) Package name
|
||||
package.name = myapp
|
||||
|
||||
# (str) Package domain (needed for android/ios packaging)
|
||||
package.domain = org.test
|
||||
|
||||
# (str) Source code where the main.py live
|
||||
source.dir = .
|
||||
|
||||
# (list) Source files to include (let empty to include all the files)
|
||||
source.include_exts = py,png,jpg,kv,atlas
|
||||
|
||||
# (list) List of inclusions using pattern matching
|
||||
#source.include_patterns = assets/*,images/*.png
|
||||
|
||||
# (list) Source files to exclude (let empty to not exclude anything)
|
||||
#source.exclude_exts = spec
|
||||
|
||||
# (list) List of directory to exclude (let empty to not exclude anything)
|
||||
#source.exclude_dirs = tests, bin, venv
|
||||
|
||||
# (list) List of exclusions using pattern matching
|
||||
# Do not prefix with './'
|
||||
#source.exclude_patterns = license,images/*/*.jpg
|
||||
|
||||
# (str) Application versioning (method 1)
|
||||
version = 0.1
|
||||
|
||||
# (str) Application versioning (method 2)
|
||||
# version.regex = __version__ = ['"](.*)['"]
|
||||
# version.filename = %(source.dir)s/main.py
|
||||
|
||||
# (list) Application requirements
|
||||
# comma separated e.g. requirements = sqlite3,kivy
|
||||
requirements = python3,kivy
|
||||
|
||||
# (str) Custom source folders for requirements
|
||||
# Sets custom source for any requirements with recipes
|
||||
# requirements.source.kivy = ../../kivy
|
||||
|
||||
# (str) Presplash of the application
|
||||
#presplash.filename = %(source.dir)s/data/presplash.png
|
||||
|
||||
# (str) Icon of the application
|
||||
#icon.filename = %(source.dir)s/data/icon.png
|
||||
|
||||
# (list) Supported orientations
|
||||
# Valid options are: landscape, portrait, portrait-reverse or landscape-reverse
|
||||
orientation = portrait
|
||||
|
||||
# (list) List of service to declare
|
||||
#services = NAME:ENTRYPOINT_TO_PY,NAME2:ENTRYPOINT2_TO_PY
|
||||
|
||||
#
|
||||
# OSX Specific
|
||||
#
|
||||
|
||||
#
|
||||
# author = © Copyright Info
|
||||
|
||||
# change the major version of python used by the app
|
||||
osx.python_version = 3
|
||||
|
||||
# Kivy version to use
|
||||
osx.kivy_version = 1.9.1
|
||||
|
||||
#
|
||||
# Android specific
|
||||
#
|
||||
|
||||
# (bool) Indicate if the application should be fullscreen or not
|
||||
fullscreen = 0
|
||||
|
||||
# (string) Presplash background color (for android toolchain)
|
||||
# Supported formats are: #RRGGBB #AARRGGBB or one of the following names:
|
||||
# red, blue, green, black, white, gray, cyan, magenta, yellow, lightgray,
|
||||
# darkgray, grey, lightgrey, darkgrey, aqua, fuchsia, lime, maroon, navy,
|
||||
# olive, purple, silver, teal.
|
||||
#android.presplash_color = #FFFFFF
|
||||
|
||||
# (string) Presplash animation using Lottie format.
|
||||
# see https://lottiefiles.com/ for examples and https://airbnb.design/lottie/
|
||||
# for general documentation.
|
||||
# Lottie files can be created using various tools, like Adobe After Effect or Synfig.
|
||||
#android.presplash_lottie = "path/to/lottie/file.json"
|
||||
|
||||
# (str) Adaptive icon of the application (used if Android API level is 26+ at runtime)
|
||||
#icon.adaptive_foreground.filename = %(source.dir)s/data/icon_fg.png
|
||||
#icon.adaptive_background.filename = %(source.dir)s/data/icon_bg.png
|
||||
|
||||
# (list) Permissions
|
||||
# (See https://python-for-android.readthedocs.io/en/latest/buildoptions/#build-options-1 for all the supported syntaxes and properties)
|
||||
#android.permissions = android.permission.INTERNET, (name=android.permission.WRITE_EXTERNAL_STORAGE;maxSdkVersion=18)
|
||||
|
||||
# (list) features (adds uses-feature -tags to manifest)
|
||||
#android.features = android.hardware.usb.host
|
||||
|
||||
# (int) Target Android API, should be as high as possible.
|
||||
#android.api = 31
|
||||
|
||||
# (int) Minimum API your APK / AAB will support.
|
||||
#android.minapi = 21
|
||||
|
||||
# (int) Android SDK version to use
|
||||
#android.sdk = 20
|
||||
|
||||
# (str) Android NDK version to use
|
||||
#android.ndk = 23b
|
||||
|
||||
# (int) Android NDK API to use. This is the minimum API your app will support, it should usually match android.minapi.
|
||||
#android.ndk_api = 21
|
||||
|
||||
# (bool) Use --private data storage (True) or --dir public storage (False)
|
||||
#android.private_storage = True
|
||||
|
||||
# (str) Android NDK directory (if empty, it will be automatically downloaded.)
|
||||
#android.ndk_path =
|
||||
|
||||
# (str) Android SDK directory (if empty, it will be automatically downloaded.)
|
||||
#android.sdk_path =
|
||||
|
||||
# (str) ANT directory (if empty, it will be automatically downloaded.)
|
||||
#android.ant_path =
|
||||
|
||||
# (bool) If True, then skip trying to update the Android sdk
|
||||
# This can be useful to avoid excess Internet downloads or save time
|
||||
# when an update is due and you just want to test/build your package
|
||||
# android.skip_update = False
|
||||
|
||||
# (bool) If True, then automatically accept SDK license
|
||||
# agreements. This is intended for automation only. If set to False,
|
||||
# the default, you will be shown the license when first running
|
||||
# buildozer.
|
||||
# android.accept_sdk_license = False
|
||||
|
||||
# (str) Android entry point, default is ok for Kivy-based app
|
||||
#android.entrypoint = org.kivy.android.PythonActivity
|
||||
|
||||
# (str) Full name including package path of the Java class that implements Android Activity
|
||||
# use that parameter together with android.entrypoint to set custom Java class instead of PythonActivity
|
||||
#android.activity_class_name = org.kivy.android.PythonActivity
|
||||
|
||||
# (str) Extra xml to write directly inside the <manifest> element of AndroidManifest.xml
|
||||
# use that parameter to provide a filename from where to load your custom XML code
|
||||
#android.extra_manifest_xml = ./src/android/extra_manifest.xml
|
||||
|
||||
# (str) Extra xml to write directly inside the <manifest><application> tag of AndroidManifest.xml
|
||||
# use that parameter to provide a filename from where to load your custom XML arguments:
|
||||
#android.extra_manifest_application_arguments = ./src/android/extra_manifest_application_arguments.xml
|
||||
|
||||
# (str) Full name including package path of the Java class that implements Python Service
|
||||
# use that parameter to set custom Java class which extends PythonService
|
||||
#android.service_class_name = org.kivy.android.PythonService
|
||||
|
||||
# (str) Android app theme, default is ok for Kivy-based app
|
||||
# android.apptheme = "@android:style/Theme.NoTitleBar"
|
||||
|
||||
# (list) Pattern to whitelist for the whole project
|
||||
#android.whitelist =
|
||||
|
||||
# (str) Path to a custom whitelist file
|
||||
#android.whitelist_src =
|
||||
|
||||
# (str) Path to a custom blacklist file
|
||||
#android.blacklist_src =
|
||||
|
||||
# (list) List of Java .jar files to add to the libs so that pyjnius can access
|
||||
# their classes. Don't add jars that you do not need, since extra jars can slow
|
||||
# down the build process. Allows wildcards matching, for example:
|
||||
# OUYA-ODK/libs/*.jar
|
||||
#android.add_jars = foo.jar,bar.jar,path/to/more/*.jar
|
||||
|
||||
# (list) List of Java files to add to the android project (can be java or a
|
||||
# directory containing the files)
|
||||
#android.add_src =
|
||||
|
||||
# (list) Android AAR archives to add
|
||||
#android.add_aars =
|
||||
|
||||
# (list) Put these files or directories in the apk assets directory.
|
||||
# Either form may be used, and assets need not be in 'source.include_exts'.
|
||||
# 1) android.add_assets = source_asset_relative_path
|
||||
# 2) android.add_assets = source_asset_path:destination_asset_relative_path
|
||||
#android.add_assets =
|
||||
|
||||
# (list) Put these files or directories in the apk res directory.
|
||||
# The option may be used in three ways, the value may contain one or zero ':'
|
||||
# Some examples:
|
||||
# 1) A file to add to resources, legal resource names contain ['a-z','0-9','_']
|
||||
# android.add_resources = my_icons/all-inclusive.png:drawable/all_inclusive.png
|
||||
# 2) A directory, here 'legal_icons' must contain resources of one kind
|
||||
# android.add_resources = legal_icons:drawable
|
||||
# 3) A directory, here 'legal_resources' must contain one or more directories,
|
||||
# each of a resource kind: drawable, xml, etc...
|
||||
# android.add_resources = legal_resources
|
||||
#android.add_resources =
|
||||
|
||||
# (list) Gradle dependencies to add
|
||||
#android.gradle_dependencies =
|
||||
|
||||
# (bool) Enable AndroidX support. Enable when 'android.gradle_dependencies'
|
||||
# contains an 'androidx' package, or any package from Kotlin source.
|
||||
# android.enable_androidx requires android.api >= 28
|
||||
#android.enable_androidx = True
|
||||
|
||||
# (list) add java compile options
|
||||
# this can for example be necessary when importing certain java libraries using the 'android.gradle_dependencies' option
|
||||
# see https://developer.android.com/studio/write/java8-support for further information
|
||||
# android.add_compile_options = "sourceCompatibility = 1.8", "targetCompatibility = 1.8"
|
||||
|
||||
# (list) Gradle repositories to add {can be necessary for some android.gradle_dependencies}
|
||||
# please enclose in double quotes
|
||||
# e.g. android.gradle_repositories = "maven { url 'https://kotlin.bintray.com/ktor' }"
|
||||
#android.add_gradle_repositories =
|
||||
|
||||
# (list) packaging options to add
|
||||
# see https://google.github.io/android-gradle-dsl/current/com.android.build.gradle.internal.dsl.PackagingOptions.html
|
||||
# can be necessary to solve conflicts in gradle_dependencies
|
||||
# please enclose in double quotes
|
||||
# e.g. android.add_packaging_options = "exclude 'META-INF/common.kotlin_module'", "exclude 'META-INF/*.kotlin_module'"
|
||||
#android.add_packaging_options =
|
||||
|
||||
# (list) Java classes to add as activities to the manifest.
|
||||
#android.add_activities = com.example.ExampleActivity
|
||||
|
||||
# (str) OUYA Console category. Should be one of GAME or APP
|
||||
# If you leave this blank, OUYA support will not be enabled
|
||||
#android.ouya.category = GAME
|
||||
|
||||
# (str) Filename of OUYA Console icon. It must be a 732x412 png image.
|
||||
#android.ouya.icon.filename = %(source.dir)s/data/ouya_icon.png
|
||||
|
||||
# (str) XML file to include as an intent filters in <activity> tag
|
||||
#android.manifest.intent_filters =
|
||||
|
||||
# (list) Copy these files to src/main/res/xml/ (used for example with intent-filters)
|
||||
#android.res_xml = PATH_TO_FILE,
|
||||
|
||||
# (str) launchMode to set for the main activity
|
||||
#android.manifest.launch_mode = standard
|
||||
|
||||
# (str) screenOrientation to set for the main activity.
|
||||
# Valid values can be found at https://developer.android.com/guide/topics/manifest/activity-element
|
||||
#android.manifest.orientation = fullSensor
|
||||
|
||||
# (list) Android additional libraries to copy into libs/armeabi
|
||||
#android.add_libs_armeabi = libs/android/*.so
|
||||
#android.add_libs_armeabi_v7a = libs/android-v7/*.so
|
||||
#android.add_libs_arm64_v8a = libs/android-v8/*.so
|
||||
#android.add_libs_x86 = libs/android-x86/*.so
|
||||
#android.add_libs_mips = libs/android-mips/*.so
|
||||
|
||||
# (bool) Indicate whether the screen should stay on
|
||||
# Don't forget to add the WAKE_LOCK permission if you set this to True
|
||||
#android.wakelock = False
|
||||
|
||||
# (list) Android application meta-data to set (key=value format)
|
||||
#android.meta_data =
|
||||
|
||||
# (list) Android library project to add (will be added in the
|
||||
# project.properties automatically.)
|
||||
#android.library_references =
|
||||
|
||||
# (list) Android shared libraries which will be added to AndroidManifest.xml using <uses-library> tag
|
||||
#android.uses_library =
|
||||
|
||||
# (str) Android logcat filters to use
|
||||
#android.logcat_filters = *:S python:D
|
||||
|
||||
# (bool) Android logcat only display log for activity's pid
|
||||
#android.logcat_pid_only = False
|
||||
|
||||
# (str) Android additional adb arguments
|
||||
#android.adb_args = -H host.docker.internal
|
||||
|
||||
# (bool) Copy library instead of making a libpymodules.so
|
||||
#android.copy_libs = 1
|
||||
|
||||
# (list) The Android archs to build for, choices: armeabi-v7a, arm64-v8a, x86, x86_64
|
||||
# In past, was `android.arch` as we weren't supporting builds for multiple archs at the same time.
|
||||
android.archs = arm64-v8a, armeabi-v7a
|
||||
|
||||
# (int) overrides automatic versionCode computation (used in build.gradle)
|
||||
# this is not the same as app version and should only be edited if you know what you're doing
|
||||
# android.numeric_version = 1
|
||||
|
||||
# (bool) enables Android auto backup feature (Android API >=23)
|
||||
android.allow_backup = True
|
||||
|
||||
# (str) XML file for custom backup rules (see official auto backup documentation)
|
||||
# android.backup_rules =
|
||||
|
||||
# (str) If you need to insert variables into your AndroidManifest.xml file,
|
||||
# you can do so with the manifestPlaceholders property.
|
||||
# This property takes a map of key-value pairs. (via a string)
|
||||
# Usage example : android.manifest_placeholders = [myCustomUrl:\"org.kivy.customurl\"]
|
||||
# android.manifest_placeholders = [:]
|
||||
|
||||
# (bool) Skip byte compile for .py files
|
||||
# android.no-byte-compile-python = False
|
||||
|
||||
# (str) The format used to package the app for release mode (aab or apk or aar).
|
||||
# android.release_artifact = aab
|
||||
|
||||
# (str) The format used to package the app for debug mode (apk or aar).
|
||||
# android.debug_artifact = apk
|
||||
|
||||
#
|
||||
# Python for android (p4a) specific
|
||||
#
|
||||
|
||||
# (str) python-for-android URL to use for checkout
|
||||
#p4a.url =
|
||||
|
||||
# (str) python-for-android fork to use in case if p4a.url is not specified, defaults to upstream (kivy)
|
||||
#p4a.fork = kivy
|
||||
|
||||
# (str) python-for-android branch to use, defaults to master
|
||||
#p4a.branch = master
|
||||
|
||||
# (str) python-for-android specific commit to use, defaults to HEAD, must be within p4a.branch
|
||||
#p4a.commit = HEAD
|
||||
|
||||
# (str) python-for-android git clone directory (if empty, it will be automatically cloned from github)
|
||||
#p4a.source_dir =
|
||||
|
||||
# (str) The directory in which python-for-android should look for your own build recipes (if any)
|
||||
#p4a.local_recipes =
|
||||
|
||||
# (str) Filename to the hook for p4a
|
||||
#p4a.hook =
|
||||
|
||||
# (str) Bootstrap to use for android builds
|
||||
# p4a.bootstrap = sdl2
|
||||
|
||||
# (int) port number to specify an explicit --port= p4a argument (eg for bootstrap flask)
|
||||
#p4a.port =
|
||||
|
||||
# Control passing the --use-setup-py vs --ignore-setup-py to p4a
|
||||
# "in the future" --use-setup-py is going to be the default behaviour in p4a, right now it is not
|
||||
# Setting this to false will pass --ignore-setup-py, true will pass --use-setup-py
|
||||
# NOTE: this is general setuptools integration, having pyproject.toml is enough, no need to generate
|
||||
# setup.py if you're using Poetry, but you need to add "toml" to source.include_exts.
|
||||
#p4a.setup_py = false
|
||||
|
||||
# (str) extra command line arguments to pass when invoking pythonforandroid.toolchain
|
||||
#p4a.extra_args =
|
||||
|
||||
|
||||
|
||||
#
|
||||
# iOS specific
|
||||
#
|
||||
|
||||
# (str) Path to a custom kivy-ios folder
|
||||
#ios.kivy_ios_dir = ../kivy-ios
|
||||
# Alternately, specify the URL and branch of a git checkout:
|
||||
ios.kivy_ios_url = https://github.com/kivy/kivy-ios
|
||||
ios.kivy_ios_branch = master
|
||||
|
||||
# Another platform dependency: ios-deploy
|
||||
# Uncomment to use a custom checkout
|
||||
#ios.ios_deploy_dir = ../ios_deploy
|
||||
# Or specify URL and branch
|
||||
ios.ios_deploy_url = https://github.com/phonegap/ios-deploy
|
||||
ios.ios_deploy_branch = 1.10.0
|
||||
|
||||
# (bool) Whether or not to sign the code
|
||||
ios.codesign.allowed = false
|
||||
|
||||
# (str) Name of the certificate to use for signing the debug version
|
||||
# Get a list of available identities: buildozer ios list_identities
|
||||
#ios.codesign.debug = "iPhone Developer: <lastname> <firstname> (<hexstring>)"
|
||||
|
||||
# (str) The development team to use for signing the debug version
|
||||
#ios.codesign.development_team.debug = <hexstring>
|
||||
|
||||
# (str) Name of the certificate to use for signing the release version
|
||||
#ios.codesign.release = %(ios.codesign.debug)s
|
||||
|
||||
# (str) The development team to use for signing the release version
|
||||
#ios.codesign.development_team.release = <hexstring>
|
||||
|
||||
# (str) URL pointing to .ipa file to be installed
|
||||
# This option should be defined along with `display_image_url` and `full_size_image_url` options.
|
||||
#ios.manifest.app_url =
|
||||
|
||||
# (str) URL pointing to an icon (57x57px) to be displayed during download
|
||||
# This option should be defined along with `app_url` and `full_size_image_url` options.
|
||||
#ios.manifest.display_image_url =
|
||||
|
||||
# (str) URL pointing to a large icon (512x512px) to be used by iTunes
|
||||
# This option should be defined along with `app_url` and `display_image_url` options.
|
||||
#ios.manifest.full_size_image_url =
|
||||
|
||||
|
||||
[buildozer]
|
||||
|
||||
# (int) Log level (0 = error only, 1 = info, 2 = debug (with command output))
|
||||
log_level = 2
|
||||
|
||||
# (int) Display warning if buildozer is run as root (0 = False, 1 = True)
|
||||
warn_on_root = 1
|
||||
|
||||
# (str) Path to build artifact storage, absolute or relative to spec file
|
||||
# build_dir = ./.buildozer
|
||||
|
||||
# (str) Path to build output (i.e. .apk, .aab, .ipa) storage
|
||||
# bin_dir = ./bin
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# List as sections
|
||||
#
|
||||
# You can define all the "list" as [section:key].
|
||||
# Each line will be considered as a option to the list.
|
||||
# Let's take [app] / source.exclude_patterns.
|
||||
# Instead of doing:
|
||||
#
|
||||
#[app]
|
||||
#source.exclude_patterns = license,data/audio/*.wav,data/images/original/*
|
||||
#
|
||||
# This can be translated into:
|
||||
#
|
||||
#[app:source.exclude_patterns]
|
||||
#license
|
||||
#data/audio/*.wav
|
||||
#data/images/original/*
|
||||
#
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Profiles
|
||||
#
|
||||
# You can extend section / key with a profile
|
||||
# For example, you want to deploy a demo version of your application without
|
||||
# HD content. You could first change the title to add "(demo)" in the name
|
||||
# and extend the excluded directories to remove the HD content.
|
||||
#
|
||||
#[app@demo]
|
||||
#title = My Application (demo)
|
||||
#
|
||||
#[app:source.exclude_patterns@demo]
|
||||
#images/hd/*
|
||||
#
|
||||
# Then, invoke the command line with the "demo" profile:
|
||||
#
|
||||
#buildozer --profile demo android debug
|
247
kivy_venv/bin/Activate.ps1
Normal file
247
kivy_venv/bin/Activate.ps1
Normal file
@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
69
kivy_venv/bin/activate
Normal file
69
kivy_venv/bin/activate
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV="/home/meko/Builds/test-kivy-app/kivy_venv"
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1="(kivy_venv) ${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT="(kivy_venv) "
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
26
kivy_venv/bin/activate.csh
Normal file
26
kivy_venv/bin/activate.csh
Normal file
@ -0,0 +1,26 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV "/home/meko/Builds/test-kivy-app/kivy_venv"
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = "(kivy_venv) $prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT "(kivy_venv) "
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
69
kivy_venv/bin/activate.fish
Normal file
69
kivy_venv/bin/activate.fish
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV "/home/meko/Builds/test-kivy-app/kivy_venv"
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) "(kivy_venv) " (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT "(kivy_venv) "
|
||||
end
|
8
kivy_venv/bin/buildozer
Executable file
8
kivy_venv/bin/buildozer
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from buildozer.scripts.client import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/buildozer-remote
Executable file
8
kivy_venv/bin/buildozer-remote
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from buildozer.scripts.remote import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/cygdb
Executable file
8
kivy_venv/bin/cygdb
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from Cython.Debugger.Cygdb import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/cython
Executable file
8
kivy_venv/bin/cython
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from Cython.Compiler.Main import setuptools_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(setuptools_main())
|
8
kivy_venv/bin/cythonize
Executable file
8
kivy_venv/bin/cythonize
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from Cython.Build.Cythonize import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/docutils
Executable file
8
kivy_venv/bin/docutils
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
227
kivy_venv/bin/garden
Normal file
227
kivy_venv/bin/garden
Normal file
@ -0,0 +1,227 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import zipfile
|
||||
import tempfile
|
||||
from shutil import rmtree, move
|
||||
from os import listdir, getcwd, chdir, makedirs
|
||||
from os.path import join, realpath, exists, isdir, expanduser, abspath, dirname
|
||||
|
||||
try:
|
||||
from cBytesIO import BytesIO
|
||||
except ImportError:
|
||||
try:
|
||||
from BytesIO import BytesIO
|
||||
except ImportError:
|
||||
from io import BytesIO
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
print('Garden tool require requests library.')
|
||||
print('Try to "pip install requests" in root')
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
import kivy
|
||||
garden_kivy_dir = abspath(join(dirname(kivy.__file__), 'garden'))
|
||||
except ImportError:
|
||||
garden_kivy_dir = None
|
||||
|
||||
|
||||
garden_system_dir = join(expanduser('~'), '.kivy', 'garden')
|
||||
garden_app_dir = join(realpath(getcwd()), 'libs', 'garden')
|
||||
|
||||
|
||||
class GardenTool(object):
|
||||
'''Garden command-line tool.
|
||||
'''
|
||||
|
||||
def main(self, argv):
|
||||
parser = argparse.ArgumentParser(description=self.__doc__)
|
||||
subparsers = parser.add_subparsers()
|
||||
|
||||
p = subparsers.add_parser('list',
|
||||
help='List all the installed garden packages')
|
||||
p.add_argument('--app', action='store_true',
|
||||
help='Use the local app directory (./libs/garden)')
|
||||
p.add_argument('--kivy', action='store_true',
|
||||
help='Use the kivy garden directory (kivy/garden)')
|
||||
p.set_defaults(func=self.cmd_list)
|
||||
|
||||
p = subparsers.add_parser('search',
|
||||
help='Search garden package on github')
|
||||
p.add_argument('pattern', nargs='?', default='',
|
||||
help='Word to search in the package name (optional)')
|
||||
p.set_defaults(func=self.cmd_search)
|
||||
|
||||
p = subparsers.add_parser('install',
|
||||
help='Install a garden package')
|
||||
p.add_argument('--app', action='store_true',
|
||||
help='Install in the local app directory (./libs/garden)')
|
||||
p.add_argument('--kivy', action='store_true',
|
||||
help='Use the kivy garden directory (kivy/garden)')
|
||||
p.add_argument('--upgrade', action='store_true',
|
||||
help='Force the installation')
|
||||
p.add_argument('--no-download-progress', action='store_false', dest='animate',
|
||||
help='Disable download progress indicator')
|
||||
p.add_argument('package', nargs=1,
|
||||
help='Name of the package to install')
|
||||
p.set_defaults(func=self.cmd_install)
|
||||
|
||||
p = subparsers.add_parser('uninstall',
|
||||
help='Uninstall a garden package')
|
||||
p.add_argument('--app', action='store_true',
|
||||
help='Use the local app directory (./libs/garden)')
|
||||
p.add_argument('--kivy', action='store_true',
|
||||
help='Use the kivy garden directory (kivy/garden)')
|
||||
p.add_argument('package', nargs=1,
|
||||
help='Name of the package to uninstall')
|
||||
p.set_defaults(func=self.cmd_uninstall)
|
||||
|
||||
self.options = options = parser.parse_args(argv)
|
||||
options.package = [p.lower() for p in getattr(options, 'package', ())]
|
||||
|
||||
if hasattr(options, 'func'):
|
||||
options.func()
|
||||
|
||||
# No cmd supplied, print help message
|
||||
else:
|
||||
parser.print_help()
|
||||
|
||||
if getattr(self.options, 'kivy', False) and garden_kivy_dir is None:
|
||||
print('--kivy provided; cannot find kivy')
|
||||
sys.exit(0)
|
||||
|
||||
def cmd_list(self):
|
||||
if self.options.kivy:
|
||||
for filename in listdir(garden_kivy_dir):
|
||||
fullname = join(garden_kivy_dir, filename)
|
||||
if isdir(fullname):
|
||||
print(fullname)
|
||||
return
|
||||
|
||||
directory = garden_app_dir if self.options.app else garden_system_dir
|
||||
if not exists(directory):
|
||||
return
|
||||
|
||||
for filename in listdir(directory):
|
||||
fullname = join(directory, filename)
|
||||
if filename.startswith('garden.') and isdir(fullname):
|
||||
print(filename.split('.', 1)[-1])
|
||||
|
||||
def cmd_search(self):
|
||||
r = requests.get('https://api.github.com/users/kivy-garden/repos')
|
||||
pattern = self.options.pattern
|
||||
data = r.json()
|
||||
for repo in data:
|
||||
if not repo['name'].startswith('garden.'):
|
||||
continue
|
||||
name = repo['name'].split('.', 1)[-1]
|
||||
if pattern and pattern not in name:
|
||||
continue
|
||||
desc = repo['description']
|
||||
|
||||
print("{} - {}".format(
|
||||
name, desc.splitlines()[0] if desc else 'No description'))
|
||||
|
||||
def cmd_install(self):
|
||||
opts = self.options
|
||||
src_package = self.gardenify(opts.package[0])
|
||||
dst_package = opts.package[0] if self.options.kivy else src_package
|
||||
|
||||
garden_dir = garden_kivy_dir if self.options.kivy else (
|
||||
garden_app_dir if self.options.app else garden_system_dir)
|
||||
dest_dir = join(garden_dir, dst_package)
|
||||
|
||||
if exists(dest_dir) and not opts.upgrade:
|
||||
print('Garden package already installed in {}'.format(dest_dir))
|
||||
print('Use --upgrade to upgrade.')
|
||||
sys.exit(0)
|
||||
|
||||
fd = self.download(src_package, opts.animate)
|
||||
tempdir = tempfile.mkdtemp(prefix='garden-')
|
||||
try:
|
||||
self.extract(fd, tempdir)
|
||||
|
||||
if not exists(garden_dir):
|
||||
makedirs(garden_dir)
|
||||
|
||||
if exists(dest_dir):
|
||||
print('Removing old version...')
|
||||
rmtree(dest_dir)
|
||||
|
||||
source_directory = join(tempdir, '{}-master'.format(src_package))
|
||||
|
||||
print('Installing new version...')
|
||||
move(source_directory, dest_dir)
|
||||
|
||||
print('Done! {} is installed at: {}'.format(src_package,
|
||||
dest_dir))
|
||||
|
||||
finally:
|
||||
print('Cleaning...')
|
||||
if exists(tempdir):
|
||||
rmtree(tempdir, ignore_errors=True)
|
||||
|
||||
def cmd_uninstall(self):
|
||||
opts = self.options
|
||||
package = opts.package[0] if self.options.kivy else \
|
||||
self.gardenify(opts.package[0])
|
||||
garden_dir = garden_kivy_dir if self.options.kivy else (
|
||||
garden_app_dir if self.options.app else garden_system_dir)
|
||||
|
||||
d = join(garden_dir, package)
|
||||
if not exists(d):
|
||||
print('Package {} not installed, nothing to uninstall.'.format(
|
||||
package))
|
||||
sys.exit(0)
|
||||
|
||||
print('Deleting {}...'.format(d))
|
||||
rmtree(d)
|
||||
|
||||
|
||||
def gardenify(self, package):
|
||||
if not package.startswith('garden.'):
|
||||
return 'garden.' + package
|
||||
return package
|
||||
|
||||
def download(self, package, animate):
|
||||
url = 'https://github.com/kivy-garden/{}/archive/master.zip'.format(
|
||||
package)
|
||||
|
||||
print('Downloading {} ...'.format(url))
|
||||
r = requests.get(url)#, prefetch=False)
|
||||
if r.status_code != 200:
|
||||
print('Unable to find the garden package. (error={})'.format(
|
||||
r.status_code))
|
||||
sys.exit(1)
|
||||
|
||||
animation = '\\|/-'
|
||||
index = 0
|
||||
count = 0
|
||||
data = b''
|
||||
for buf in r.iter_content(1024):
|
||||
index += 1
|
||||
data += buf
|
||||
count += len(buf)
|
||||
if animate:
|
||||
print('Progression', count, animation[index % len(animation)], '\r')
|
||||
sys.stdout.flush()
|
||||
print('Download done ({} downloaded)'.format(count))
|
||||
|
||||
return BytesIO(data)
|
||||
|
||||
def extract(self, fd, directory):
|
||||
print('Extracting...')
|
||||
z = zipfile.ZipFile(fd)
|
||||
curdir = getcwd()
|
||||
chdir(directory)
|
||||
z.extractall()
|
||||
chdir(curdir)
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
GardenTool().main(sys.argv[1:])
|
1
kivy_venv/bin/garden.bat
Normal file
1
kivy_venv/bin/garden.bat
Normal file
@ -0,0 +1 @@
|
||||
python.exe "%~dp0garden" %*
|
8
kivy_venv/bin/kivymd.add_view
Executable file
8
kivy_venv/bin/kivymd.add_view
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from kivymd.tools.patterns.add_view import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/kivymd.create_project
Executable file
8
kivy_venv/bin/kivymd.create_project
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from kivymd.tools.patterns.create_project import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/kivymd.make_release
Executable file
8
kivy_venv/bin/kivymd.make_release
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from kivymd.tools.release.make_release import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/normalizer
Executable file
8
kivy_venv/bin/normalizer
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from charset_normalizer.cli import cli_detect
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli_detect())
|
8
kivy_venv/bin/pip
Executable file
8
kivy_venv/bin/pip
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/pip3
Executable file
8
kivy_venv/bin/pip3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/pip3.11
Executable file
8
kivy_venv/bin/pip3.11
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/pygmentize
Executable file
8
kivy_venv/bin/pygmentize
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pygments.cmdline import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
kivy_venv/bin/pyproject-build
Executable file
8
kivy_venv/bin/pyproject-build
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from build.__main__ import entrypoint
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(entrypoint())
|
1
kivy_venv/bin/python
Symbolic link
1
kivy_venv/bin/python
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
1
kivy_venv/bin/python3
Symbolic link
1
kivy_venv/bin/python3
Symbolic link
@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
1
kivy_venv/bin/python3.11
Symbolic link
1
kivy_venv/bin/python3.11
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
8
kivy_venv/bin/rst2html
Executable file
8
kivy_venv/bin/rst2html
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2html
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2html())
|
8
kivy_venv/bin/rst2html4
Executable file
8
kivy_venv/bin/rst2html4
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2html4
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2html4())
|
8
kivy_venv/bin/rst2html5
Executable file
8
kivy_venv/bin/rst2html5
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2html5
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2html5())
|
8
kivy_venv/bin/rst2latex
Executable file
8
kivy_venv/bin/rst2latex
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2latex
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2latex())
|
8
kivy_venv/bin/rst2man
Executable file
8
kivy_venv/bin/rst2man
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2man
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2man())
|
8
kivy_venv/bin/rst2odt
Executable file
8
kivy_venv/bin/rst2odt
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2odt
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2odt())
|
8
kivy_venv/bin/rst2pseudoxml
Executable file
8
kivy_venv/bin/rst2pseudoxml
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2pseudoxml
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2pseudoxml())
|
8
kivy_venv/bin/rst2s5
Executable file
8
kivy_venv/bin/rst2s5
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2s5
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2s5())
|
8
kivy_venv/bin/rst2xetex
Executable file
8
kivy_venv/bin/rst2xetex
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2xetex
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2xetex())
|
8
kivy_venv/bin/rst2xml
Executable file
8
kivy_venv/bin/rst2xml
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.core import rst2xml
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(rst2xml())
|
8
kivy_venv/bin/virtualenv
Executable file
8
kivy_venv/bin/virtualenv
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/meko/Builds/test-kivy-app/kivy_venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from virtualenv.__main__ import run_with_catch
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(run_with_catch())
|
@ -0,0 +1,19 @@
|
||||
The original Pyrex code as of 2006-04 is licensed under the following
|
||||
license: "Copyright stuff: Pyrex is free of restrictions. You may use,
|
||||
redistribute, modify and distribute modified versions."
|
||||
|
||||
------------------
|
||||
|
||||
Cython, which derives from Pyrex, is licensed under the Apache 2.0
|
||||
Software License. More precisely, all modifications and new code
|
||||
made to go from Pyrex to Cython are so licensed.
|
||||
|
||||
See LICENSE.txt for more details.
|
||||
|
||||
------------------
|
||||
|
||||
The output of a Cython compilation is NOT considered a derivative
|
||||
work of Cython. Specifically, though the compilation process may
|
||||
embed snippets of varying lengths into the final output, these
|
||||
snippets, as embedded in the output, do not encumber the resulting
|
||||
output with any license restrictions.
|
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
@ -0,0 +1,62 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Cython
|
||||
Version: 0.29.33
|
||||
Summary: The Cython compiler for writing C extensions for the Python language.
|
||||
Home-page: http://cython.org/
|
||||
Author: Robert Bradshaw, Stefan Behnel, Dag Seljebotn, Greg Ewing, et al.
|
||||
Author-email: cython-devel@python.org
|
||||
License: Apache
|
||||
Project-URL: Documentation, https://cython.readthedocs.io/
|
||||
Project-URL: Donate, https://cython.readthedocs.io/en/latest/src/donating.html
|
||||
Project-URL: Source Code, https://github.com/cython/cython
|
||||
Project-URL: Bug Tracker, https://github.com/cython/cython/issues
|
||||
Project-URL: User Group, https://groups.google.com/g/cython-users
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.6
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Programming Language :: C
|
||||
Classifier: Programming Language :: Cython
|
||||
Classifier: Topic :: Software Development :: Code Generators
|
||||
Classifier: Topic :: Software Development :: Compilers
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*
|
||||
License-File: LICENSE.txt
|
||||
License-File: COPYING.txt
|
||||
|
||||
The Cython language makes writing C extensions for the Python language as
|
||||
easy as Python itself. Cython is a source code translator based on Pyrex_,
|
||||
but supports more cutting edge functionality and optimizations.
|
||||
|
||||
The Cython language is a superset of the Python language (almost all Python
|
||||
code is also valid Cython code), but Cython additionally supports optional
|
||||
static typing to natively call C functions, operate with C++ classes and
|
||||
declare fast C types on variables and class attributes. This allows the
|
||||
compiler to generate very efficient C code from Cython code.
|
||||
|
||||
This makes Cython the ideal language for writing glue code for external
|
||||
C/C++ libraries, and for fast C modules that speed up the execution of
|
||||
Python code.
|
||||
|
||||
Note that for one-time builds, e.g. for CI/testing, on platforms that are not
|
||||
covered by one of the wheel packages provided on PyPI *and* the pure Python wheel
|
||||
that we provide is not used, it is substantially faster than a full source build
|
||||
to install an uncompiled (slower) version of Cython with::
|
||||
|
||||
pip install Cython --install-option="--no-cython-compile"
|
||||
|
||||
.. _Pyrex: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
|
@ -0,0 +1,433 @@
|
||||
../../../bin/cygdb,sha256=Hv8ChWpjEB2IhgB-n6dY_oP-UfHI3f3sCAnvHlIs5eM,257
|
||||
../../../bin/cython,sha256=A1clLvq8R59bOxdo8by4y8mpt56Py-Hl87u2LGv3gpI,278
|
||||
../../../bin/cythonize,sha256=ByaP6JywxsY2js5mclrPxG3j7-ODHirJ758V9bciqqU,258
|
||||
Cython-0.29.33.dist-info/COPYING.txt,sha256=4escSahQjoFz2sMBV-SmQ5pErYhGGUdGxCT7w_wrldc,756
|
||||
Cython-0.29.33.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Cython-0.29.33.dist-info/LICENSE.txt,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
|
||||
Cython-0.29.33.dist-info/METADATA,sha256=MAcwJjjEiJYso8XOykN7PGN5dWEHrCl2g1CrnKhViD4,3096
|
||||
Cython-0.29.33.dist-info/RECORD,,
|
||||
Cython-0.29.33.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
Cython-0.29.33.dist-info/WHEEL,sha256=l362OvyXRXwtCwxOGrLP3yrtBKPotk248SCsHuB4Lt8,191
|
||||
Cython-0.29.33.dist-info/entry_points.txt,sha256=VU8NX8gnQyFbyqiWMzfh9BHvYMuoQRS3Nbm3kKcKQeY,139
|
||||
Cython-0.29.33.dist-info/top_level.txt,sha256=jLV8tZV98iCbIfiJR4DVzTX5Ru1Y_pYMZ59wkMCe6SY,24
|
||||
Cython/Build/BuildExecutable.py,sha256=9wjcOncQpbCT_Pu2Ljj5jufFLSY6T_oHOFt0uXYTdRk,4318
|
||||
Cython/Build/Cythonize.py,sha256=OBSutgd93PXiRi5_6GRuhpfMZehw9YOfRgt1mxi6QLk,8359
|
||||
Cython/Build/Dependencies.py,sha256=mlx2cxzyr19LyWp4ElaG3Ea-za-I_LXO8MnUAM3mAE0,49607
|
||||
Cython/Build/Distutils.py,sha256=iO5tPX84Kc-ZWMocfuQbl_PqyC9HGGIRS-NiKI60-ZE,49
|
||||
Cython/Build/Inline.py,sha256=wd5_xBOup8MWxNtv5uwWgWWG5GrkShYysMMluY7ripE,13451
|
||||
Cython/Build/IpythonMagic.py,sha256=j-E-JJYGj03ceWDp5SRmyK-WxyroRkNOEQODA9rCfFc,21126
|
||||
Cython/Build/Tests/TestCyCache.py,sha256=olOvphv4q1CLsNzMAhgmXnL77zhGGQKuKbYs_iSFFvA,4151
|
||||
Cython/Build/Tests/TestInline.py,sha256=bp4XGXZYSyxY2zXI0q4bg58a0ARsclUzXSfWykn0dZw,2854
|
||||
Cython/Build/Tests/TestIpythonMagic.py,sha256=H88J6_r3dP5V3-N7eSKZuE2l9G8BtkFF_o8wY7pifKU,6225
|
||||
Cython/Build/Tests/TestStripLiterals.py,sha256=-QeUd22OnoL50rW2EgpfIA01UzRMutcBA5NrhkHiE7M,1550
|
||||
Cython/Build/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Build/Tests/__pycache__/TestCyCache.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestInline.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-311.pyc,,
|
||||
Cython/Build/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Build/__init__.py,sha256=zBhW6hT9Mwk1ZybfuPi61iCa6A4srId1HJz9OiEd07o,69
|
||||
Cython/Build/__pycache__/BuildExecutable.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Cythonize.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Dependencies.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Distutils.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/Inline.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/IpythonMagic.cpython-311.pyc,,
|
||||
Cython/Build/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/CodeWriter.py,sha256=Sa1hLmUcIMnCGTNASqyzfGqk70i6v-YslD5ogllCaiY,23901
|
||||
Cython/Compiler/AnalysedTreeTransforms.py,sha256=T2r1SLFeG7a4D9tt93hm8YRH_aGLwmK82PNpb1RsbnE,3826
|
||||
Cython/Compiler/Annotate.py,sha256=bkVgdH3ItuIbaSpi9Qo5bKiIjOX3-J5sZORfcyk_eZY,12950
|
||||
Cython/Compiler/AutoDocTransforms.py,sha256=le7k-xhGWDkvpRXRppZsqhJzkav6i41GmAOmDetxVmk,7517
|
||||
Cython/Compiler/Buffer.py,sha256=6-YIUiLWtgpC2LRhs7116umzNiEbzA6cz1lnPkSEn30,29035
|
||||
Cython/Compiler/Builtin.py,sha256=fdR58NDTWNQWO3tA0JtAzUq9JfXL6pJdEOSHTcz6BZA,22469
|
||||
Cython/Compiler/CmdLine.py,sha256=mZIKTOwjX_K5lWdjHufuhqBHaeEaYpQVIzNiYx5VWKE,10819
|
||||
Cython/Compiler/Code.pxd,sha256=XEZU44jXCOFoYTG15-R0X41krTrjpuo1wxRKjyzYwoY,3354
|
||||
Cython/Compiler/Code.py,sha256=VqhIddTqD7JpJfGZnFR16CFq9KkaY48QFdu51omzjuo,97241
|
||||
Cython/Compiler/CodeGeneration.py,sha256=jkcx2uX07nck0UZSgysIThRuJiPbdkSeXR4Z2uzbQU8,1108
|
||||
Cython/Compiler/CythonScope.py,sha256=mNwmE509uePmR3S2djg3Dq6zOZ3hgK-U8NDeawys9WM,6027
|
||||
Cython/Compiler/DebugFlags.py,sha256=5Zg9ETp0qPFEma6QMtrGUwu9Fn6NTYMBMWPI_GxFW0A,623
|
||||
Cython/Compiler/Errors.py,sha256=GATz9x6onls09cM6TeDw3kdBgdCxUiKJBILwukBF6WI,7554
|
||||
Cython/Compiler/ExprNodes.py,sha256=ZzkQzNrqSXdvpisV_BSqAXze3Z8-OjZQ76J8BS9knPQ,550360
|
||||
Cython/Compiler/FlowControl.cpython-311-x86_64-linux-gnu.so,sha256=xpd47rKf_1_TNfoyo57HMcNvDgwmpU6bb4hDRC-XZ_0,659248
|
||||
Cython/Compiler/FlowControl.pxd,sha256=W8bqGCJLzvAhnL3d1OF8798ZDJg0QI0eA_ebnA4dkoQ,2918
|
||||
Cython/Compiler/FlowControl.py,sha256=-I33Yd9sp1RCENjFkrufBM8Xrj3y5SI7KZ04Vz5Djiw,45981
|
||||
Cython/Compiler/FusedNode.cpython-311-x86_64-linux-gnu.so,sha256=H9yTXZesKsZ83pBX321w0dzK-N2pOu7ZG38iRdby2zY,453040
|
||||
Cython/Compiler/FusedNode.py,sha256=qmHVHylEPpllK_x-471xBG-zMzAt0RF52tLxt_-RZqs,37794
|
||||
Cython/Compiler/Future.py,sha256=GwcWZ_Vti0atfbOARfS2kIvZOvRuPu38wbShIn4o4kA,587
|
||||
Cython/Compiler/Interpreter.py,sha256=iNweexX2HDI5nZj2rzkW-lw9Rq3gzM__P7SBqH3uxbU,2106
|
||||
Cython/Compiler/Lexicon.py,sha256=Cw_wIfQymcTEdkoo82V2xbV8kvCp30O-Pc7qF4hbfCI,4855
|
||||
Cython/Compiler/Main.py,sha256=4mlK7KdiXFO_bu55bJ10m7OJ3tqmsoYtScDNFK_u-GY,37082
|
||||
Cython/Compiler/MemoryView.py,sha256=c6J7PtQ6wccb9uBxvbLngia4jO-h2uea7viIzJNhDYU,30009
|
||||
Cython/Compiler/ModuleNode.py,sha256=yutAZV37yXwic5GSssNiVIiBldE_bQu1OYhTko1DFPc,141980
|
||||
Cython/Compiler/Naming.py,sha256=Z6FLRoOl21F91SMoIoMSu_CqYosGhxDJLO9grYSHhVI,6333
|
||||
Cython/Compiler/Nodes.py,sha256=-1_SeKkQ3ijDWshDk6ge0TgiBP4NnwdhNWlU9EI37xM,391550
|
||||
Cython/Compiler/Optimize.py,sha256=MJ5w9bXi6qU-yvDaiiZdBMSnp8g-QhEhBhs1zpWVICQ,210296
|
||||
Cython/Compiler/Options.py,sha256=MPnBwPqSHWTM1tf5c0VSI9RdSSFTmg4M4zlIbCIk_Zw,19742
|
||||
Cython/Compiler/ParseTreeTransforms.pxd,sha256=oDSda3XYP79o8tCAxEm_epWWVPXSaPy8lYrprWYRyNk,2468
|
||||
Cython/Compiler/ParseTreeTransforms.py,sha256=oLSgT3GwVZQ3wLE1mr0B5x-cJDyG-MpKLyPsYAbKnag,139345
|
||||
Cython/Compiler/Parsing.pxd,sha256=pL_EQdVWaw7EZVTZrxzMcUvoSJeAoXXPRh8kCLudysk,8984
|
||||
Cython/Compiler/Parsing.py,sha256=JS9wrFCRGOIbHgdr61CdyS0e86zfO199cVZi16IM8D4,130162
|
||||
Cython/Compiler/Pipeline.py,sha256=6ravd0QCR5sCoKlz9HEz209A2UqgLp4Qp0VysoKa_mI,14061
|
||||
Cython/Compiler/PyrexTypes.py,sha256=i46nr1fJazgCE47WDC5OFo1YEAdy9fZjOU3STi5mFnc,173999
|
||||
Cython/Compiler/Pythran.py,sha256=NHIml0yx0jPLyTLRAHXZr0LHTyEyfYqspgYuV4vdNKI,7267
|
||||
Cython/Compiler/Scanning.cpython-311-x86_64-linux-gnu.so,sha256=q_L9tGjJwukrRj0rz6ZUrRM5ABbipv7cDaIWqOwDef4,301904
|
||||
Cython/Compiler/Scanning.pxd,sha256=vjjPLZb5udPzMpk67DKojTTDUl31QU86oXyAMks7Hsw,2113
|
||||
Cython/Compiler/Scanning.py,sha256=Gl7sU5rI-5H5v8z9QLZBh1hivS2cOGa9H878QdEpUU4,18438
|
||||
Cython/Compiler/StringEncoding.py,sha256=dn3jVSL4JScbeYf1f56j5RJR9M58AnCDdJObC-cc3fg,10812
|
||||
Cython/Compiler/Symtab.py,sha256=n0tdTnbPkSuU5EgPkr6nGBN98Lhbklbjt8o6NUFt264,111727
|
||||
Cython/Compiler/Tests/TestBuffer.py,sha256=yw-KUu2pu4CnsqYaqxj5rpyYodmjJSEqUvQNxLwPL8I,4155
|
||||
Cython/Compiler/Tests/TestCmdLine.py,sha256=mT673jQ9MekvTExZdnQLztNabCsAMRSuN__egQ7rZ9A,6658
|
||||
Cython/Compiler/Tests/TestFlowControl.py,sha256=ge3iqBor6xe5MLaLbOtw7ETntJnAh8EequF1aetVzMw,1848
|
||||
Cython/Compiler/Tests/TestGrammar.py,sha256=zWMvYG19nIH85Le8ragXt2vLBlWlGGNeMgrTdQO5JGM,3443
|
||||
Cython/Compiler/Tests/TestMemView.py,sha256=yBAQ5tN8DVPTFRJ81dAzOepCt5Ly6fiaAssQve5ryy4,2515
|
||||
Cython/Compiler/Tests/TestParseTreeTransforms.py,sha256=xiRKOWqHIMIT1bbYB1o5TefKJXC8oPpjt79tQFodMRc,8900
|
||||
Cython/Compiler/Tests/TestSignatureMatching.py,sha256=qMiQZeg5_Eu8VfCY_lMawqpjpKNV0r6p6-9czKec1aY,3338
|
||||
Cython/Compiler/Tests/TestStringEncoding.py,sha256=RL1YDXrOUe1sPLEbWmTJQ5VF-uEZ_KLz0jaeQoMx85k,2315
|
||||
Cython/Compiler/Tests/TestTreeFragment.py,sha256=wHlnF0ApwxeITx9pzg46P9N_2lM-7mrPwhDeNlIQnXM,2206
|
||||
Cython/Compiler/Tests/TestTreePath.py,sha256=x-2KBIhSE6-vT-2BPe2q-zaa1oHtc42ibKzVs_y8_So,4238
|
||||
Cython/Compiler/Tests/TestTypes.py,sha256=YuFib5WCJfSPafrhy5yrCUdwajYw61yGPo4HveTyzUs,669
|
||||
Cython/Compiler/Tests/TestUtilityLoad.py,sha256=Uzf4_bOjha-zwQaikNbsAOVQs3ZPX3YD7QQ5T4s66YY,3341
|
||||
Cython/Compiler/Tests/TestVisitor.py,sha256=QAnBpUhnirSFKqXWiawo-OhXhxIRTQidWxEzGjJDz6M,2228
|
||||
Cython/Compiler/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Compiler/Tests/__pycache__/TestBuffer.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestCmdLine.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestFlowControl.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestGrammar.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestMemView.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestParseTreeTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestSignatureMatching.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestStringEncoding.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestTreeFragment.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestTreePath.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestTypes.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestUtilityLoad.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/TestVisitor.cpython-311.pyc,,
|
||||
Cython/Compiler/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Compiler/TreeFragment.py,sha256=jQn4Lp2dNddJ-tjPquoFcyTcX9EIuTAbZKZAKs9-cGU,9408
|
||||
Cython/Compiler/TreePath.py,sha256=3_lScMAd2Sly2ekZ8HO8dyZstGSruINl2MXXq9OYd2Q,7641
|
||||
Cython/Compiler/TypeInference.py,sha256=s-GKZcq16KPPgY_OpF8cTlQmX1Cpu-qBMCtmAYDg8fc,22326
|
||||
Cython/Compiler/TypeSlots.py,sha256=zvQrFMKa6Pzk8jB187PuqFhdJUa82SQ1thztSV1ArhI,37837
|
||||
Cython/Compiler/UtilNodes.py,sha256=mS6jlZ530p17WGU0ApbwvLecuByT18LFipVrKJg5jrM,11636
|
||||
Cython/Compiler/UtilityCode.py,sha256=PbQtJt9fSwgm5xeXgYWQih6eUSmJL_RwTxOa5T9SrZU,9391
|
||||
Cython/Compiler/Version.py,sha256=f2mS6aYYdu0DMRK3B4IuzMlCo-k-ffmehCao_vKlTdk,181
|
||||
Cython/Compiler/Visitor.cpython-311-x86_64-linux-gnu.so,sha256=iuGmYYKAxDMkL95zIDoAFSNPHoVwBNghm7NoIa9xSRQ,355288
|
||||
Cython/Compiler/Visitor.pxd,sha256=KvOZgHoEREMTVYXr1ZoAk9H4n__rpmhIwE2S11ajeYM,1792
|
||||
Cython/Compiler/Visitor.py,sha256=iqtIsNaQwk8lSa6g_LnEU06CZtdnP7MDXbyodl4Ouwk,29984
|
||||
Cython/Compiler/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Compiler/__pycache__/AnalysedTreeTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Annotate.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/AutoDocTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Buffer.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Builtin.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/CmdLine.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Code.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/CodeGeneration.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/CythonScope.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/DebugFlags.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Errors.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/ExprNodes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/FlowControl.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/FusedNode.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Future.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Interpreter.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Lexicon.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Main.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/MemoryView.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/ModuleNode.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Naming.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Nodes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Optimize.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Options.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/ParseTreeTransforms.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Parsing.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Pipeline.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/PyrexTypes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Pythran.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Scanning.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/StringEncoding.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Symtab.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TreeFragment.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TreePath.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TypeInference.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/TypeSlots.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/UtilNodes.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/UtilityCode.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Version.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/Visitor.cpython-311.pyc,,
|
||||
Cython/Compiler/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Coverage.py,sha256=FtCMjKLYWvtULxWIzN-y3RhbwQwL4GtLyIjj-B3-07E,13537
|
||||
Cython/Debugger/Cygdb.py,sha256=CH_pXm0Jhl4SAe6sJXa5NS47vMmQ2KBbecyV56vLqFE,5751
|
||||
Cython/Debugger/DebugWriter.py,sha256=Yzz28JR4qZepxvxeu_1rJxIjJ4JbNQm5vM5e_UtNuRo,1945
|
||||
Cython/Debugger/Tests/TestLibCython.py,sha256=xrENLEahnp6WtOfokVtsALR6Ot2jFR6T5ZZRcaX0Vxk,8327
|
||||
Cython/Debugger/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Debugger/Tests/__pycache__/TestLibCython.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/__pycache__/test_libcython_in_gdb.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/__pycache__/test_libpython_in_gdb.cpython-311.pyc,,
|
||||
Cython/Debugger/Tests/cfuncs.c,sha256=4SZurmnz5J1SiIs9N26Eu4zc2wvF_qMEKaN0eTcbDPo,71
|
||||
Cython/Debugger/Tests/codefile,sha256=ugwpT9GPtYZIKe2Xco4PqikyA-poQAeYfE0icXmfb44,641
|
||||
Cython/Debugger/Tests/test_libcython_in_gdb.py,sha256=EvPTYkd7nzR3JtFim-ASLI6wfcYhgWfI4BQrucLJbHY,15804
|
||||
Cython/Debugger/Tests/test_libpython_in_gdb.py,sha256=1BD_FtMkmS4SoSQZq7MgAgDnvqIw3EcYZFVrtoCQmxo,4079
|
||||
Cython/Debugger/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Debugger/__pycache__/Cygdb.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/DebugWriter.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/libcython.cpython-311.pyc,,
|
||||
Cython/Debugger/__pycache__/libpython.cpython-311.pyc,,
|
||||
Cython/Debugger/libcython.py,sha256=Qs0qGzeUyeY___3jRDy_WWIDFGfSRH4al7On2XxkuNg,44949
|
||||
Cython/Debugger/libpython.py,sha256=IyTEdtGLnpQmt2XPgZ7oskQ8qGWWV2_5TMgZ5NhTA0k,90489
|
||||
Cython/Debugging.py,sha256=vFtJhn7QstMf5gnYru2qHIz5ZjPg1KSlZVGHr-pBCwM,552
|
||||
Cython/Distutils/__init__.py,sha256=uyWaN2NJ_mKYLzVsDPi0qZCdIYoW5M_7YYEmAOIL3Ek,98
|
||||
Cython/Distutils/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Distutils/__pycache__/build_ext.cpython-311.pyc,,
|
||||
Cython/Distutils/__pycache__/extension.cpython-311.pyc,,
|
||||
Cython/Distutils/__pycache__/old_build_ext.cpython-311.pyc,,
|
||||
Cython/Distutils/build_ext.py,sha256=Fc_cI5wN0fT1Mf2k5B5nH-PgZ8Gq2lL6OlzF_qzy3dA,1007
|
||||
Cython/Distutils/extension.py,sha256=FHvtK3Tj9MqE17TuZ_jWg1Mh4X7e-CXIPUpJK7nqcQE,4706
|
||||
Cython/Distutils/old_build_ext.py,sha256=Hy34A1HqhoDOyU-krN2gJUYXK2mYWc8E2EZB-stvmrE,13635
|
||||
Cython/Includes/Deprecated/python.pxd,sha256=l7crg8H9cVRedMcjDf_9xDLdnUT57Vt1BxlZWom-h88,61
|
||||
Cython/Includes/Deprecated/python_bool.pxd,sha256=qOaFbsP6_pKoB3HGTjQUkFhQqukXheCmcSnnBQLdKGQ,66
|
||||
Cython/Includes/Deprecated/python_buffer.pxd,sha256=gv2a3ngcOnRKZZHSox_bW1WD8jGbxfH9NJm1-iUXf9U,68
|
||||
Cython/Includes/Deprecated/python_bytes.pxd,sha256=07-Hk3YpN_i4mIlbWYbNgDkjEytQAYOepJLJTY1CrVk,67
|
||||
Cython/Includes/Deprecated/python_cobject.pxd,sha256=V9F0DHQbFZPbJ8RRnN9mft2ipq4wubM8ghBCGHr6NwE,69
|
||||
Cython/Includes/Deprecated/python_complex.pxd,sha256=ITmq55v0b1gibEpLSCTCz68ViljenSuGGjiWn_nvIvI,69
|
||||
Cython/Includes/Deprecated/python_dict.pxd,sha256=gYhGkJhMmzWcrXoPnJHUcp-vdtcwUACbGlfv3wtGsKU,66
|
||||
Cython/Includes/Deprecated/python_exc.pxd,sha256=irWdwDYRWU16-P54uGDNfUSUtkL5Sj_1zBDWext_80g,65
|
||||
Cython/Includes/Deprecated/python_float.pxd,sha256=v1Hbpd4SF3hSF7ZL_olMaYJzmBNA9jWn0eO9ggLBlvc,67
|
||||
Cython/Includes/Deprecated/python_function.pxd,sha256=lkYKySQy1W36hfyyAJsc3E-8d9bsx5k8OhIMFQ6k2jA,70
|
||||
Cython/Includes/Deprecated/python_getargs.pxd,sha256=NEdeqPqu4di0YJm_7yLfvuS903CAe4K2Pzb13TRfBdE,69
|
||||
Cython/Includes/Deprecated/python_instance.pxd,sha256=FX9UlYrSxDrzch7wUvh_Y5Ix-bsDYARkXzZJOg2FvEI,70
|
||||
Cython/Includes/Deprecated/python_int.pxd,sha256=Cwd4J4KTKjxwEMz1BbCso0g0pOID9AnySKOC1g0kLqA,65
|
||||
Cython/Includes/Deprecated/python_iterator.pxd,sha256=nPJ0nKSmnUVzI1SPrTSt9wSD7SQILyhONJdP0H_-FGc,70
|
||||
Cython/Includes/Deprecated/python_list.pxd,sha256=VHpylsg46-5Ud8rwlPe63bb3zSToXm9R_fPorZrJsUE,66
|
||||
Cython/Includes/Deprecated/python_long.pxd,sha256=pg8hOKNoKaW-Mslugzeq6NCeznJw939LT24AVQn_cqE,66
|
||||
Cython/Includes/Deprecated/python_mapping.pxd,sha256=AZtJdYm37glDSNChduAsgavz-_DPDkxxQEAO9lDGy84,69
|
||||
Cython/Includes/Deprecated/python_mem.pxd,sha256=Mxidel5P4yuJxJOvoYr0PN1FD78oCOIJUEMPYMYU7lE,65
|
||||
Cython/Includes/Deprecated/python_method.pxd,sha256=x5ye5_8KqtsW2HrEon5NdFJmIkmVDV1KeVpFsuC2UZE,68
|
||||
Cython/Includes/Deprecated/python_module.pxd,sha256=lKu5VYCgC6S7LSgFa22V2YTY9JfML0vABDZpChhxs60,68
|
||||
Cython/Includes/Deprecated/python_number.pxd,sha256=X4MxGoITZuJNPtC2cFJ8lQwui8MOC6rQfEDbFIcWA9k,68
|
||||
Cython/Includes/Deprecated/python_object.pxd,sha256=qr2OwYVot4ELK3_-mCfaktXgLJEaKWDyCEblQ2vXV-E,68
|
||||
Cython/Includes/Deprecated/python_oldbuffer.pxd,sha256=QyY4Vn5-cFaOt0oZ27GuRXa3tLawgMZN8KMamn9F1yo,71
|
||||
Cython/Includes/Deprecated/python_pycapsule.pxd,sha256=tHJfhgm1TrSwJQwQFdhwP7YE7oQFiegxhNhgCDmlB6A,71
|
||||
Cython/Includes/Deprecated/python_ref.pxd,sha256=wv39G35V7tN5sIhcL1APpe5NuhCwYwVy6X5DPPm5g5A,65
|
||||
Cython/Includes/Deprecated/python_sequence.pxd,sha256=9ycCua1ODfECKPd56_GBmeqzWrfdqmkjhbEmdt87NC0,70
|
||||
Cython/Includes/Deprecated/python_set.pxd,sha256=_Z5KVXs0V_T8fpgLX-2LbDAZIY1HnuhO-eTUHHRYwu0,65
|
||||
Cython/Includes/Deprecated/python_string.pxd,sha256=6VgAehwW9PcUC9Kp_HbRVMYPeF_Q-L8yr9o2ezuTzys,68
|
||||
Cython/Includes/Deprecated/python_tuple.pxd,sha256=_ZTQh7dRBmrRs9mtmOFjP37d0IFItxs20kzFtKtkY-g,67
|
||||
Cython/Includes/Deprecated/python_type.pxd,sha256=2OKmEdSqoyK8fXttlHG3NRguZ-ZikUUet-kjKLq-eEU,66
|
||||
Cython/Includes/Deprecated/python_unicode.pxd,sha256=TF8-N0un1WdyccTDo9hZVABc53SYzKnC3MEKrGb3vV0,69
|
||||
Cython/Includes/Deprecated/python_version.pxd,sha256=ZXrK0UGUt8vHbYPxm7PTdhMe1_h7Yj6Lo74oFxjnNns,69
|
||||
Cython/Includes/Deprecated/python_weakref.pxd,sha256=CUWMSmClrWPoTnlClOFCSHa6Xd55qDgIlcDCD6tfEhM,69
|
||||
Cython/Includes/Deprecated/stdio.pxd,sha256=lNc2YuvWJ-LNSSdN7adDo1lf-C2M0r10hH4bysha9Sg,64
|
||||
Cython/Includes/Deprecated/stdlib.pxd,sha256=PbCbjT8MjDjVRjx5Rod79gi22-9YI35jTulePAKCPXE,65
|
||||
Cython/Includes/Deprecated/stl.pxd,sha256=tHpByeYgNiclr3YtCdKKAeEs3CHJflqacC7YgV7YN8k,2187
|
||||
Cython/Includes/cpython/__init__.pxd,sha256=8URNRvb7JkYhqDZv2J0bVsdeZBEJBu7u2QFYkDyXPG8,8254
|
||||
Cython/Includes/cpython/array.pxd,sha256=g6apBiXJG_7a0mjGqkFaqlcQjsg64uKK1VlXFFyXVCk,6056
|
||||
Cython/Includes/cpython/bool.pxd,sha256=FaNn8K-Toq8FAws8BguKMk0IPM7IJm9IiUUGARSrKYk,1359
|
||||
Cython/Includes/cpython/buffer.pxd,sha256=wm7aHygGUof_H3-JyICOek_xiU6Oks178ark1Nfk-a0,4870
|
||||
Cython/Includes/cpython/bytearray.pxd,sha256=m0VdoHgouF1T0VtRjFLXZ5fi22vaMdVwFWpF3IxB6m4,1443
|
||||
Cython/Includes/cpython/bytes.pxd,sha256=tGLuiBMzQjurK_pq77CM7P0C-Hn0KUIDZCXW9QvlJAI,9906
|
||||
Cython/Includes/cpython/cellobject.pxd,sha256=DXdTjSN1RP1m4CsaGuggyIA1nGiIO4Kr7-c0ZWfrpRo,1390
|
||||
Cython/Includes/cpython/ceval.pxd,sha256=h6fBetZCUvWTcCn3bkXZg2kqnIuyC5ZSChyhOocxVus,236
|
||||
Cython/Includes/cpython/cobject.pxd,sha256=ZeMdbpZLqpcTywdv2VoppMTWD4X_yghL6Qox7LVfOyg,1524
|
||||
Cython/Includes/cpython/codecs.pxd,sha256=3fyudEljkNGQ7e3dJPst6udXGcAeNKvlMK9U8EB1gXc,5084
|
||||
Cython/Includes/cpython/complex.pxd,sha256=-bu0Cq91tS_U5tTra18S0jqt1FgSJTHXJ5J8rk-MOAA,1777
|
||||
Cython/Includes/cpython/conversion.pxd,sha256=dbbFuZJF0SscmcaNCUf0tlBQDRdKYf5tH8yzhTU_XYI,1696
|
||||
Cython/Includes/cpython/datetime.pxd,sha256=wQqB8i3tMZOTw9qrLdbHJRkxgZqscGEqmq0tIDfkkqw,6776
|
||||
Cython/Includes/cpython/dict.pxd,sha256=F-mrlcAfNmTSUkpJed63bp1IaO0cwG56t_DLk7f0xv0,6877
|
||||
Cython/Includes/cpython/exc.pxd,sha256=29-bGESwfoMqx1XU3MMggkIr8pz_l0UPruzy6KIzHxg,13606
|
||||
Cython/Includes/cpython/float.pxd,sha256=RD1qEAUocXG9qXrRiT8aCSSfGEyTzjTc9HQkv5xg1ZE,1424
|
||||
Cython/Includes/cpython/function.pxd,sha256=IoJUprbz8F10DEKh-vSSpY6nWkCHw7SqG9p2f-4gHek,2671
|
||||
Cython/Includes/cpython/genobject.pxd,sha256=emC1JPgkuvBbGC0rgeZapKDaXYEj48uWiDC-xF0Mx2I,1052
|
||||
Cython/Includes/cpython/getargs.pxd,sha256=268twKzdiAkQMXMsetNiNlNqaqzlKtiBENKbhOHd8x4,775
|
||||
Cython/Includes/cpython/instance.pxd,sha256=qCbxPeHKOJbuszDu3UEaI-KLX9lTopuaNCcpoHJ9ngU,985
|
||||
Cython/Includes/cpython/int.pxd,sha256=d9a0zUw_M3pRycCESWIjtfXWRvdvFOWxjdOjkcbX2gs,4131
|
||||
Cython/Includes/cpython/iterator.pxd,sha256=o52mLHbdm14Kqant2hR2zAdYzqK4fkSWZtBcRmpoP-I,1319
|
||||
Cython/Includes/cpython/iterobject.pxd,sha256=5UEZZwG5zyzxoCpknoQuh91zPUV11Uxr6F1taJdTv8k,1036
|
||||
Cython/Includes/cpython/list.pxd,sha256=t-xo7ROcewe6-0ztrNjsxMKV2KxD-ILUzemQ2tTuI7E,4084
|
||||
Cython/Includes/cpython/long.pxd,sha256=d6jHN1XJj7WL5PPAUK8U93IPyjWtlTmyhrBEVmxmGF8,7051
|
||||
Cython/Includes/cpython/longintrepr.pxd,sha256=czvKr3fQdYIwIRu3gojXssT9LFXH-nstM7f_lPt7lE4,480
|
||||
Cython/Includes/cpython/mapping.pxd,sha256=OIdvNVUoIpVCSQnkbLceTicSN0D_jRw6wQmbtxtxKuQ,2693
|
||||
Cython/Includes/cpython/mem.pxd,sha256=AWVinanXFBZXvU141we2dD8dkOqMJ8W3KAAzpBJqB5g,5386
|
||||
Cython/Includes/cpython/memoryview.pxd,sha256=l97J5-hbH3hp9aMbdXp3n73hJFNNsng6uyh40pc8P7I,2504
|
||||
Cython/Includes/cpython/method.pxd,sha256=UWXflhIlP4y7B5XDbH9rQ15iADciGW-iqV1-dlw2Wwg,2196
|
||||
Cython/Includes/cpython/module.pxd,sha256=Vc0Up7q1Mir38bN293E8RMugxWfuzjLFHM4g2dviPBM,9226
|
||||
Cython/Includes/cpython/number.pxd,sha256=tYJ0nn0k_llUx3ilniW9iXd2rKVejA-J5UUiIJ36Kww,11922
|
||||
Cython/Includes/cpython/object.pxd,sha256=AK5D-LrDbvisO6wpkh29G6xjA71sBF_KfKUyn0k2hzg,18366
|
||||
Cython/Includes/cpython/oldbuffer.pxd,sha256=v0-YZ_Iwwj3ZQdM8VE5NPTQcbBlJdWwJGtNO9DonGgw,2916
|
||||
Cython/Includes/cpython/pycapsule.pxd,sha256=8vySKea_zyTZZ1H39lICYkpJmnYTqZrun3DLf7d2294,5692
|
||||
Cython/Includes/cpython/pylifecycle.pxd,sha256=LziJZHclGdtsr3yT28fULHNZ_n67bs1DmI9s8YzrBGg,2000
|
||||
Cython/Includes/cpython/pystate.pxd,sha256=xgf1BBkv36qvqMaR77zZWYOuonAwe4RfNKE2g91A6fk,3683
|
||||
Cython/Includes/cpython/pythread.pxd,sha256=0375TaYmtNCDDkWBh9WY4oJ_jhoTxhu_RR5QiOsXmYg,1946
|
||||
Cython/Includes/cpython/ref.pxd,sha256=2AmgyGDhwA4scts0jcBTdGTCG0b2P8-eYAKFJk44x0I,2557
|
||||
Cython/Includes/cpython/sequence.pxd,sha256=iTp3C6wOvTdvjLmdj3k9GqQqCGDlQFpzWi07wVQqSS4,6008
|
||||
Cython/Includes/cpython/set.pxd,sha256=ewHRPVMbHUGDInZ3NziisCq68LvtmEJ-SXFbzmuJxLc,5383
|
||||
Cython/Includes/cpython/slice.pxd,sha256=Rzgn8diAsN7lS2xGTq4VZucV3ziFNra4oz4tKGEAkMo,3111
|
||||
Cython/Includes/cpython/string.pxd,sha256=EKjDGFnPcjnkndwGMJqRrszDV390Mc6o7AADChnNCiA,9944
|
||||
Cython/Includes/cpython/tuple.pxd,sha256=eOLfH75ftJeYszztGFWWZP7LnyFOgw8GNuE7PQ9hAvs,3206
|
||||
Cython/Includes/cpython/type.pxd,sha256=FOypwX0ZYamPc4uO8bejzO-HzgiaSRaXEPsxxxPIneI,1831
|
||||
Cython/Includes/cpython/unicode.pxd,sha256=ymXBenUGMeamRMv9bQ8_FbwIeId7Xi7Xjzs0Nhravyw,27009
|
||||
Cython/Includes/cpython/version.pxd,sha256=l5KXt04isEv3qbGRJZ8fNlCYGO24HsA2l4EM3RxTEhE,847
|
||||
Cython/Includes/cpython/weakref.pxd,sha256=UU9H_ovHG07FFgP_kY2xhGv3yJDr_8iujCZnxH2jnlo,1984
|
||||
Cython/Includes/libc/__init__.pxd,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Includes/libc/errno.pxd,sha256=j5hcKx7zinivU2b6SFMy8LZ9sJIQY5XLrp9cQUKv5AQ,2050
|
||||
Cython/Includes/libc/float.pxd,sha256=IhvZJljpTG0fZtcIp7EBO2Sqddozxoxwj4RFNVcKLpY,966
|
||||
Cython/Includes/libc/limits.pxd,sha256=xHlIyuDIKpjqclvRRYzZIcfd5G1re5QtbmoDMqZR_Ec,621
|
||||
Cython/Includes/libc/locale.pxd,sha256=sixG8EJ6wiVb0HIR1LWJ3lXTjTv463GJ9C_40HRovN4,1140
|
||||
Cython/Includes/libc/math.pxd,sha256=51YUxSe01R96_rr3sj4n4MLW-eOmQbcwdNn8YthTxqg,2948
|
||||
Cython/Includes/libc/setjmp.pxd,sha256=XRh-gSuhvFLl0nRvz5OhSWYe9eqX2attAck3JI7mwa4,297
|
||||
Cython/Includes/libc/signal.pxd,sha256=XOScPDA5vzlfEmu4D7DFT1-5Eu3qMpYdUarjt-fqlbw,1170
|
||||
Cython/Includes/libc/stddef.pxd,sha256=0rCyoocCfDL-1OQo3pxHQ-6fW20SAYktOLPoa4d97w8,164
|
||||
Cython/Includes/libc/stdint.pxd,sha256=qHJXzpWCrbvJWSaHYZL27VJPupQreTZl9VGj0jgLdRU,3449
|
||||
Cython/Includes/libc/stdio.pxd,sha256=qUaxEwNrQl1-4yHLorzzJZ-a-y5_-Rm_m7Z5meaRqH0,2476
|
||||
Cython/Includes/libc/stdlib.pxd,sha256=p62xq2XfB24WfNCjRXgD6cOYoRuV47AnYijkjWv4ugE,2444
|
||||
Cython/Includes/libc/string.pxd,sha256=tzYGbRrnccedFLes-KGgJqM0FEtwHF_q4f2fqltNvyE,2038
|
||||
Cython/Includes/libc/time.pxd,sha256=-IRH7fTq3wKBKmQQnpZRhaLsnl7D_qXFz_4BLB9O3u0,1317
|
||||
Cython/Includes/libcpp/__init__.pxd,sha256=PCx8ZRfOeoyMRu41PPlPY9uo2kZmt_7d0KR4Epzfe7c,94
|
||||
Cython/Includes/libcpp/algorithm.pxd,sha256=-2V0oR_cFbHHzeWT9RcfLvi5Oy-s_V2lO3OI6ZtX6fM,1770
|
||||
Cython/Includes/libcpp/cast.pxd,sha256=En4LBubdinfpm9Rel077tK_LGwg_3k4FAu9mlIbKjuw,501
|
||||
Cython/Includes/libcpp/complex.pxd,sha256=IjL8y9sAglhGbTKhqsJbW0mgMTYEUbYM1ekr5VDhQgY,3012
|
||||
Cython/Includes/libcpp/deque.pxd,sha256=aWqZ9j3OgQuqFLkqRO_U2FIwbSe2fKmmYDRAfD0vGqU,3106
|
||||
Cython/Includes/libcpp/forward_list.pxd,sha256=-So1ExEOkoPfsSdMlJSlI5665-zyWLMoUxlmm2Dlokk,2392
|
||||
Cython/Includes/libcpp/functional.pxd,sha256=BXPYkffEOlKO1erTLqlkBLex6Gb5byDMF4hq_MZ2aVI,381
|
||||
Cython/Includes/libcpp/iterator.pxd,sha256=mVc1rsAYfn_ARrdQ4JG-Ut5il5ynIa1CRXLk8Be8Zks,1432
|
||||
Cython/Includes/libcpp/limits.pxd,sha256=RKV3wPvk4tV_vX5CYQRJIK5m5xXav7SeBxltlLyk8es,1661
|
||||
Cython/Includes/libcpp/list.pxd,sha256=rGQfB3_mDcRkGKtMBuvDQvAPmgzR5jxSf3eOSRgR4YA,2658
|
||||
Cython/Includes/libcpp/map.pxd,sha256=GF2sDnFBHZoU3Rcuo1rn6yKh45nhkX0_iH29xj581ow,2551
|
||||
Cython/Includes/libcpp/memory.pxd,sha256=Hj20aSnmUTPAhFCrlmF_aeHJKiMiZ2bDKhaYn2yybJo,3600
|
||||
Cython/Includes/libcpp/pair.pxd,sha256=UBJXw43uHkDlNsr0Pu1aP5tZ-ILXhUAyOLam2qdWmZA,27
|
||||
Cython/Includes/libcpp/queue.pxd,sha256=FbL4Q7C3lgtZ2YzictU1XBXzQ7G-6y9i_7l2eqzA3Xc,649
|
||||
Cython/Includes/libcpp/set.pxd,sha256=3y5Ir2TjGD7g3VRvlkXV1a3V3ZYzJvwOAfeTv8ucOCw,2170
|
||||
Cython/Includes/libcpp/stack.pxd,sha256=zM3SQOqMWONVqud13ag3bUupA-ozU_YMq4Ad2QkL6fI,292
|
||||
Cython/Includes/libcpp/string.pxd,sha256=zsvzyW6IggIDFqcF-UuLjxiNAHPtToAoc9VhKKG2U5A,8731
|
||||
Cython/Includes/libcpp/typeindex.pxd,sha256=mIHr5Mq6Lol0SlzqeK6w_giVERh3uAjZm78yPDLXzc4,524
|
||||
Cython/Includes/libcpp/typeinfo.pxd,sha256=tITsqurrdaZjsEGFksem9xZtVhSxQRxHZxcoC-4Y-DY,304
|
||||
Cython/Includes/libcpp/unordered_map.pxd,sha256=eNna4hRAucQLnliBfEMu7Unfd_lB18I42iwKmsCO0-M,2867
|
||||
Cython/Includes/libcpp/unordered_set.pxd,sha256=eUYSOMT5Gt8kZWCUKezQGyXWzatEyNg6-nmAlmcBo-k,2622
|
||||
Cython/Includes/libcpp/utility.pxd,sha256=hTbvp7c12pnU2yvzzMvflZB-MAc_--3xh3PXtD_VIwg,1040
|
||||
Cython/Includes/libcpp/vector.pxd,sha256=GYqLb74owhMmNQHUCcZSxGcYPgNuw6qULsfWKr7g6OQ,3350
|
||||
Cython/Includes/numpy/__init__.pxd,sha256=CbLwvA4u-xj7RHxbO9Hs2o6hXd7GaJJlGEn9XJVH4c4,38138
|
||||
Cython/Includes/numpy/math.pxd,sha256=qZEdamaPgCFW4J7Itc6BWgOrQSKZdxDT6kbU_gqx2g4,5807
|
||||
Cython/Includes/openmp.pxd,sha256=orCIBYFuVPtLdRdhhCm5uhGbeV_fgVCA2Jk2Bts1e2g,1713
|
||||
Cython/Includes/posix/__init__.pxd,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Includes/posix/dlfcn.pxd,sha256=2IFcGBfZEmArdE0BxB71eT_Yb7n9STaVM11AtUcg_pE,355
|
||||
Cython/Includes/posix/fcntl.pxd,sha256=oRL8-OsgcplHMGcYq5-W5twISvxK-vDfzAaEfuQHC-4,1194
|
||||
Cython/Includes/posix/ioctl.pxd,sha256=2RC5zejPOCTkarDZM_6Vd2wc4oBuN7iaiL_C5MPBs90,99
|
||||
Cython/Includes/posix/mman.pxd,sha256=juJcLi92N9Bc6L2p4zrUmYQIgNmrTsZ6hExbl1181pc,3362
|
||||
Cython/Includes/posix/resource.pxd,sha256=MQe1bCTYQFVMsago3pgOvR6t6NElQElg7rhVANxYRcE,1254
|
||||
Cython/Includes/posix/select.pxd,sha256=e4nhGHR8TRw6Xs9du5JoFtkd8U9sm3gX_BHq2FfmU6E,546
|
||||
Cython/Includes/posix/signal.pxd,sha256=wFJI5UthdtU9mZWjEBeZ9IIfeX252JVwDk2tsbW_q3U,1876
|
||||
Cython/Includes/posix/stat.pxd,sha256=ZOcPCpXnxlRRHcUkvg559hrFfB75uTbIYRWoQeyBCYs,1734
|
||||
Cython/Includes/posix/stdio.pxd,sha256=K8DEH38hWMvy2A8zcKbHRrHSGsgwTIrQ9qCzU-0cWS0,1054
|
||||
Cython/Includes/posix/stdlib.pxd,sha256=uGRPa00_HWZ6Chv5E13F96eut0xWHSfR7IioK9rKVLY,934
|
||||
Cython/Includes/posix/strings.pxd,sha256=GNEteqND2wgXXSvkv6U9eKSC9oIom3C7o2zQ6W_J_S4,374
|
||||
Cython/Includes/posix/time.pxd,sha256=wPUD7AjxpxmnUYmogTMFjroB2VzcPh8-b_8NEj-yG14,1980
|
||||
Cython/Includes/posix/types.pxd,sha256=tWEWxST4EGHIgYS-Ce2SGjZ-KgmM2SVe1eggdcgv3JQ,1162
|
||||
Cython/Includes/posix/unistd.pxd,sha256=w9B4d9NaXBsQ62XOr2xe9UFPGewmEk5BG6sqiRWdoM8,8061
|
||||
Cython/Includes/posix/wait.pxd,sha256=WNogQvKu2hMfEQiCyaANfVWFnyJSk6TxBU0c6npeJrA,1244
|
||||
Cython/Plex/Actions.cpython-311-x86_64-linux-gnu.so,sha256=vygw_HNG3bNLaVLv5ZLizRAFbOe4i8KfQQI33dznr4Y,73600
|
||||
Cython/Plex/Actions.pxd,sha256=FC-6ffzWR4i3rR6VSL2C64Xxs1qBhpBEzRsU7WpLn1Y,585
|
||||
Cython/Plex/Actions.py,sha256=Caxkx8Kup9m4sx24ZcDTq-fAfPGG06TAHu2NI1D9zPs,2545
|
||||
Cython/Plex/DFA.py,sha256=w4vl2ejXv6ptILtkTCbB8NcvF8ylwc6DaQ2gPFrWuo4,6012
|
||||
Cython/Plex/Errors.py,sha256=As5uuGmqZe4w0B7Dm981lZTnDG-nlXSHYqiGUKnhrrY,1169
|
||||
Cython/Plex/Lexicons.py,sha256=ay3yy9fqI5y5lfgpJ4ubBjYZQ53gFDVgNGbmoSl5DxI,6907
|
||||
Cython/Plex/Machines.py,sha256=bIKg3-yxD_r7x-zEowJ7EsPBWlgXm_XhIozqsLQBeTk,7760
|
||||
Cython/Plex/Regexps.py,sha256=qaP-Fr-GgKNmBVsMyXO3ltl2HH1JQcQiFmX2oyUyeOA,16208
|
||||
Cython/Plex/Scanners.cpython-311-x86_64-linux-gnu.so,sha256=6Rn0pAlvF6XyZzN2YutKv14Lq3lErOWUN-gWcA1gq84,107536
|
||||
Cython/Plex/Scanners.pxd,sha256=oSfcDUZ3syc2ag73udwU5xoaIGDxiNd8a2F_LLw5PzY,1481
|
||||
Cython/Plex/Scanners.py,sha256=-TXAxKW43ZbQNCSEkMWEJ0SmqYVVCkSOT9UngOCRZnQ,12259
|
||||
Cython/Plex/Timing.py,sha256=-VgQveS-Ip_2ErjrVxh4w7cXpyVBkUaSaiLadyD3bw0,472
|
||||
Cython/Plex/Traditional.py,sha256=cAT-pZnqIwCJaqgSqgKODSznFZ5DunUw_MLWx8Y650c,4120
|
||||
Cython/Plex/Transitions.py,sha256=Tvp7cFXR3ZBPPHm0TAhUMC_-uiRR9YdOkF4t0wtk-f0,7187
|
||||
Cython/Plex/__init__.py,sha256=dvMeQpSyZE75W0gkf4Xo5LAxgQLNhkAXiQoIOtcOkZ0,1282
|
||||
Cython/Plex/__pycache__/Actions.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/DFA.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Errors.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Lexicons.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Machines.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Regexps.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Scanners.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Timing.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Traditional.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/Transitions.cpython-311.pyc,,
|
||||
Cython/Plex/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Runtime/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Runtime/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Runtime/refnanny.cpython-311-x86_64-linux-gnu.so,sha256=Qfua36JJnM1F_vLiJZLT1rJTJ5vmFvMB_asuKTxBWmk,80888
|
||||
Cython/Runtime/refnanny.pyx,sha256=f2p1_0YxK25lm8Qfsu2ytvl0Im7GYyix1Q9krEBwC6c,6279
|
||||
Cython/Shadow.py,sha256=_nEqDiS_yIFXvqWwEpoq9nh1HqG6hKCAEANmKSFXoOY,13007
|
||||
Cython/StringIOTree.py,sha256=GX-TWn9XHwY5ecb4in8ovsTS5CtPTsSxZpanLWmQxgE,3336
|
||||
Cython/Tempita/__init__.py,sha256=YHujYHiLoYUwFNNswJCgzSrDuie3sV08JsWT9Nbmp78,152
|
||||
Cython/Tempita/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Tempita/__pycache__/_looper.cpython-311.pyc,,
|
||||
Cython/Tempita/__pycache__/_tempita.cpython-311.pyc,,
|
||||
Cython/Tempita/__pycache__/compat3.cpython-311.pyc,,
|
||||
Cython/Tempita/_looper.py,sha256=jlStYhz9Pgp6NatX86k-netBNBmvwaeWxCRS_S8vcIM,4168
|
||||
Cython/Tempita/_tempita.cpython-311-x86_64-linux-gnu.so,sha256=lkmTWU0P07mALomp3r82Fm7CmjeeE4cICTTQuIgroS8,600800
|
||||
Cython/Tempita/_tempita.py,sha256=4gnJhuVIsGciu_5Besbvw26g82Pm7CiXazMghZO3ejs,39588
|
||||
Cython/Tempita/compat3.py,sha256=cjW1y266vRF5Xvh8kAu7_qHGT8AGGu2kGSJRK6DI-0E,903
|
||||
Cython/TestUtils.py,sha256=fzpic9xU-LP0wempXqwUQWZapBvXnFgbW_W9--IKpIA,7979
|
||||
Cython/Tests/TestCodeWriter.py,sha256=qKad43J3hN7PLp7mVbEDESt96qsk8y3ELRwwIp9jnNw,2316
|
||||
Cython/Tests/TestCythonUtils.py,sha256=XF4Fw4J5HZ4jUPLVv7ea8ZZcl2i9yXn5cx27WTtrcmU,474
|
||||
Cython/Tests/TestJediTyper.py,sha256=F6MUG8SdzGXQwkbw6Wv1PqVlmlIT1z_7lH2buVOFT_I,6996
|
||||
Cython/Tests/TestStringIOTree.py,sha256=vTuu3z32WTcmJaf0fBq62NMghYtaPL2rRnfdl2WM--4,1946
|
||||
Cython/Tests/__init__.py,sha256=jOqtmPLCvMCq0xVMwGekuLpBmVgq0xtPFmUePySdOjs,13
|
||||
Cython/Tests/__pycache__/TestCodeWriter.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestCythonUtils.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestJediTyper.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/TestStringIOTree.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Tests/__pycache__/xmlrunner.cpython-311.pyc,,
|
||||
Cython/Tests/xmlrunner.py,sha256=N1Z_C4Q_rSWFNQxm3L99qX-SaIWEksXbmfXOd_srg3s,14801
|
||||
Cython/Utility/AsyncGen.c,sha256=iwNH8NdegHZYafEgBpErk6erU5o6GKVtHgqd3Vq9kNc,41023
|
||||
Cython/Utility/Buffer.c,sha256=VUF4xHKJGX7QMTvpJO40aI1JUL-SERLEvlXXXEk2dHU,29654
|
||||
Cython/Utility/Builtins.c,sha256=gYObNoiK_NVWRuzSFRePMb-dtw-XCp_Dx_Ztjmpq7as,16818
|
||||
Cython/Utility/CConvert.pyx,sha256=fbZVHvm2vlWj2rgm8ajBt5jrbN30nY7dEmHlBCGomlU,4338
|
||||
Cython/Utility/CMath.c,sha256=GIc7gd2WzaZryDJM3tefqXifLJpUJs6_T_c_mFrr-s8,2566
|
||||
Cython/Utility/Capsule.c,sha256=SOeU7E7T7piQEx894T2QFH2RlSG-MmsiyuY4lVN1yso,505
|
||||
Cython/Utility/CommonStructures.c,sha256=p65HHgTrf7h7Tj7JK7tIgkLrrCrjouL8HL90EHfoMoU,2558
|
||||
Cython/Utility/Complex.c,sha256=6YPGWtZUNI2ig5LRn3KWURB7z3cfhSNasW1S1Ejl7ao,10062
|
||||
Cython/Utility/Coroutine.c,sha256=YmFtN3eY_MAxEktax_xwsdPhL8qUC86jUgPe1mdlcdU,89569
|
||||
Cython/Utility/CpdefEnums.pyx,sha256=XMg8sdltQSNj2wGVfnHIWRvyHFCcLK8ZfpKznKi4lhY,1893
|
||||
Cython/Utility/CppConvert.pyx,sha256=-e5i3_J1SS_GbctsflQwylx9cqdk_CJ2SfQSEDHa71k,6098
|
||||
Cython/Utility/CppSupport.cpp,sha256=NTnSRCmi2PHuT3J6Qy15xMZGx0bf9l-MaxAbW6OVk6s,2234
|
||||
Cython/Utility/CythonFunction.c,sha256=dbUNvdQQN9G1Mzm5McRQFRDmu81R9ZSoScx6J-W71Bc,46494
|
||||
Cython/Utility/Embed.c,sha256=sMDv7XVJswaGRTQbQHtEDThZaAmvbn-6yeIqrUzboL4,6854
|
||||
Cython/Utility/Exceptions.c,sha256=OqbNVb-LWX7hR67zJq6vI-rBj1eFkSmlVjAsfwnNrtA,27285
|
||||
Cython/Utility/ExtensionTypes.c,sha256=doPkCDEsyClYfDa1386DxwVj0D4jYvKQlnbXQo2REHM,11664
|
||||
Cython/Utility/FunctionArguments.c,sha256=IH9Y5aV_tNrJLo_CWHskEnFai9fp9cKLvRkIZYl2UGQ,12040
|
||||
Cython/Utility/ImportExport.c,sha256=hs-AMn_uWVhQwQQds2PG_szRSe8hZIm8heMh1-XMQzc,21760
|
||||
Cython/Utility/MemoryView.pyx,sha256=5WnL5DJF0nuSQBN5oBeEHFYE8aYm4C0-hY9Azm-toY4,49749
|
||||
Cython/Utility/MemoryView_C.c,sha256=wlIKZ6UxkH--BFA-ENr83FtUw0KtStkqjAvB24Q0y_s,29072
|
||||
Cython/Utility/ModuleSetupCode.c,sha256=jccP_Aor8OXo6912KyPivALTofiCgZ58ibHj3WwUIw8,58449
|
||||
Cython/Utility/ObjectHandling.c,sha256=VSgqd__Zi1UgO5YpMl3cZTg5NLXTixgLZ-IgO2HJpT8,89025
|
||||
Cython/Utility/Optimize.c,sha256=xtKZ8WhOe8l4UYONGVDilNFhzGMueJxA7nYJn8QIWmc,45184
|
||||
Cython/Utility/Overflow.c,sha256=_KXlJsbMIi-jzdCotwxkN6mtqo6jHRNnPJ1ZKwXVhpE,12424
|
||||
Cython/Utility/Printing.c,sha256=o8XnfjNIT8Ub5KY4FAp_FNw-OE3xqjy0MgmYWgDcWao,5103
|
||||
Cython/Utility/Profile.c,sha256=3aq_eC7h_nUnZe-Np7td20ublCC4OowfHoV3FoW9UnU,17922
|
||||
Cython/Utility/StringTools.c,sha256=UyWngTrFElFXl4NIi7n36QCM7HL3MCNGhvqpPYLLY6o,42215
|
||||
Cython/Utility/TestCyUtilityLoader.pyx,sha256=91lWWJub7l_6xNn3ncrvQZZ94RpkQzEx2NtAaFpvrxY,152
|
||||
Cython/Utility/TestCythonScope.pyx,sha256=HQm5E5Eehr3tkDDURURyVnDputKG3-Wn2k2aIAoru9g,1595
|
||||
Cython/Utility/TestUtilityLoader.c,sha256=dGy6ZWL2kBqtmUY7kF75UEox5kadQZ__BmZKscwg2aY,279
|
||||
Cython/Utility/TypeConversion.c,sha256=0K3erVzNT9lY-jEygTxsarAirETGZcOzvSK-VDs5EJY,36302
|
||||
Cython/Utility/__init__.py,sha256=t2bpY-TYSX8lJdbKuBFJ1kBfpWVzgGw4xoZlCKfyj_s,1159
|
||||
Cython/Utility/__pycache__/__init__.cpython-311.pyc,,
|
||||
Cython/Utility/arrayarray.h,sha256=3Ll8Gd_S4rv8HaTfg5i6-aaoB9taI1vzwTp7NeA7Wy0,4089
|
||||
Cython/Utils.py,sha256=pl95NbQHXtG8mkn0qnjunmYgoUOJa2PZ5tumOOpNRVU,14118
|
||||
Cython/__init__.py,sha256=GMnkoIas6hfN_meqZAJF9BEs1NuY4-4B2L0Uls7hXaA,358
|
||||
Cython/__pycache__/CodeWriter.cpython-311.pyc,,
|
||||
Cython/__pycache__/Coverage.cpython-311.pyc,,
|
||||
Cython/__pycache__/Debugging.cpython-311.pyc,,
|
||||
Cython/__pycache__/Shadow.cpython-311.pyc,,
|
||||
Cython/__pycache__/StringIOTree.cpython-311.pyc,,
|
||||
Cython/__pycache__/TestUtils.cpython-311.pyc,,
|
||||
Cython/__pycache__/Utils.cpython-311.pyc,,
|
||||
Cython/__pycache__/__init__.cpython-311.pyc,,
|
||||
__pycache__/cython.cpython-311.pyc,,
|
||||
cython.py,sha256=z2AtgHBGh0x0h0ZcGje7IhYlR6nGH_MmOh1fFMjqYn0,520
|
||||
pyximport/__init__.py,sha256=9hOyKolFtOerPiVEyktKrT1VtzbGexq9UmORzo52iHI,79
|
||||
pyximport/__pycache__/__init__.cpython-311.pyc,,
|
||||
pyximport/__pycache__/pyxbuild.cpython-311.pyc,,
|
||||
pyximport/__pycache__/pyximport.cpython-311.pyc,,
|
||||
pyximport/pyxbuild.py,sha256=TiAkhtSxSbRW04JKtgO3FP3hfVzQ1mjjzCh5PqZDOrM,5702
|
||||
pyximport/pyximport.py,sha256=Vjxp3kbmFRf9j0ya4f0m0Ahytkjjmv2UkFueasXxL5A,23578
|
@ -0,0 +1,7 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.38.4)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp311-cp311-manylinux_2_17_x86_64
|
||||
Tag: cp311-cp311-manylinux2014_x86_64
|
||||
Tag: cp311-cp311-manylinux_2_24_x86_64
|
||||
|
@ -0,0 +1,4 @@
|
||||
[console_scripts]
|
||||
cygdb = Cython.Debugger.Cygdb:main
|
||||
cython = Cython.Compiler.Main:setuptools_main
|
||||
cythonize = Cython.Build.Cythonize:main
|
@ -0,0 +1,3 @@
|
||||
Cython
|
||||
cython
|
||||
pyximport
|
@ -0,0 +1,142 @@
|
||||
"""
|
||||
Compile a Python script into an executable that embeds CPython and run it.
|
||||
Requires CPython to be built as a shared library ('libpythonX.Y').
|
||||
|
||||
Basic usage:
|
||||
|
||||
python cythonrun somefile.py [ARGS]
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
DEBUG = True
|
||||
|
||||
import sys
|
||||
import os
|
||||
from distutils import sysconfig
|
||||
|
||||
|
||||
def get_config_var(name, default=''):
    """Return sysconfig variable *name*, or *default* when it is unset or empty."""
    value = sysconfig.get_config_var(name)
    if value:
        return value
    return default
|
||||
|
||||
# Interrogate distutils' sysconfig for everything needed to compile and
# link an embedded-CPython executable.
INCDIR = sysconfig.get_python_inc()      # directory containing Python.h
LIBDIR1 = get_config_var('LIBDIR')       # main library directory
LIBDIR2 = get_config_var('LIBPL')        # config-dir fallback (static builds)
PYLIB = get_config_var('LIBRARY')        # static library file name
PYLIB_DYN = get_config_var('LDLIBRARY')  # shared library file name
if PYLIB_DYN == PYLIB:
    # no shared library
    PYLIB_DYN = ''
else:
    PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0]   # 'lib(XYZ).so' -> XYZ

# Compiler/linker commands and flags; environment variables may override
# or extend what sysconfig reports.
CC = get_config_var('CC', os.environ.get('CC', ''))
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC))
LINKFORSHARED = get_config_var('LINKFORSHARED')
LIBS = get_config_var('LIBS')
SYSLIBS = get_config_var('SYSLIBS')
EXE_EXT = sysconfig.get_config_var('EXE')   # '' on POSIX, '.exe' on Windows
|
||||
|
||||
def _debug(msg, *args):
    """Write *msg* (optionally %-formatted with *args*) to stderr when DEBUG is set."""
    if not DEBUG:
        return
    text = msg % args if args else msg
    sys.stderr.write(text + '\n')
|
||||
|
||||
def dump_config():
    """Dump every discovered build-configuration value to stderr via _debug()."""
    for label, value in (
            ('INCDIR', INCDIR),
            ('LIBDIR1', LIBDIR1),
            ('LIBDIR2', LIBDIR2),
            ('PYLIB', PYLIB),
            ('PYLIB_DYN', PYLIB_DYN),
            ('CC', CC),
            ('CFLAGS', CFLAGS),
            ('LINKCC', LINKCC),
            ('LINKFORSHARED', LINKFORSHARED),
            ('LIBS', LIBS),
            ('SYSLIBS', SYSLIBS),
            ('EXE_EXT', EXE_EXT)):
        _debug('%s: %s', label, value)
|
||||
|
||||
def runcmd(cmd, shell=True):
    """Echo *cmd* (a list of arguments) via _debug() and execute it.

    With shell=True the list is joined into a single shell command line.
    Exits the whole process with the command's return code on failure.
    """
    if shell:
        cmd = ' '.join(cmd)
        _debug(cmd)
    else:
        _debug(' '.join(cmd))

    try:
        import subprocess
    except ImportError: # Python 2.3 ...
        returncode = os.system(cmd)
    else:
        returncode = subprocess.call(cmd, shell=shell)

    # Non-zero return code: propagate the failure to our own caller.
    if returncode:
        sys.exit(returncode)
|
||||
|
||||
def clink(basename):
    """Link basename.o into an executable against the CPython runtime.

    Prefers the shared libpython ('-lXYZ'); when PYLIB_DYN is empty
    (no shared library), links the static archive by full path instead.
    """
    runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
           + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
           + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())
|
||||
|
||||
def ccompile(basename):
    """Compile basename.c into basename.o with the configured C compiler and flags."""
    runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())
|
||||
|
||||
def cycompile(input_file, options=()):
    """Run Cython on *input_file* with --embed, producing a C file with a main().

    Exits the process when the Cython compilation reports errors.
    """
    from ..Compiler import Version, CmdLine, Main
    options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
    _debug('Using Cython %s to compile %s', Version.version, input_file)
    result = Main.compile(sources, options)
    if result.num_errors > 0:
        sys.exit(1)
|
||||
|
||||
def exec_file(program_name, args=()):
    """Run the built executable with *args*, without going through a shell."""
    command = [os.path.abspath(program_name)]
    command.extend(args)
    runcmd(command, shell=False)
|
||||
|
||||
def build(input_file, compiler_args=(), force=False):
    """
    Build an executable program from a Cython module.

    Returns the name of the executable file.
    """
    basename = os.path.splitext(input_file)[0]
    exe_file = basename + EXE_EXT
    # On platforms with no EXE extension, a '.py'-less input would be
    # overwritten by its own executable -- refuse that.
    if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
        raise ValueError("Input and output file names are the same, refusing to overwrite")
    # Skip the rebuild when the executable is at least as new as the source.
    if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
            and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
        _debug("File is up to date, not regenerating %s", exe_file)
        return exe_file
    cycompile(input_file, compiler_args)   # .py/.pyx -> .c (with --embed)
    ccompile(basename)                     # .c -> .o
    clink(basename)                        # .o -> executable
    return exe_file
|
||||
|
||||
def build_and_run(args):
    """
    Build an executable program from a Cython module and runs it.

    Arguments after the module name will be passed verbatim to the
    program.
    """
    cy_args = []
    last_arg = None
    for i, arg in enumerate(args):
        if arg.startswith('-'):
            # An option for Cython itself.
            cy_args.append(arg)
        elif last_arg in ('-X', '--directive'):
            # Value belonging to a preceding -X/--directive option.
            cy_args.append(arg)
        else:
            # First free-standing argument is the input module; everything
            # after it is passed through to the built program.
            input_file = arg
            args = args[i+1:]
            break
        last_arg = arg
    else:
        # Loop ran out without finding an input file.
        raise ValueError('no input file provided')

    program_name = build(input_file, cy_args)
    exec_file(program_name, args)
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: compile, link and run, forwarding all arguments.
    build_and_run(sys.argv[1:])
|
229
kivy_venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py
Normal file
229
kivy_venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py
Normal file
@ -0,0 +1,229 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from distutils.core import setup
|
||||
|
||||
from .Dependencies import cythonize, extended_iglob
|
||||
from ..Utils import is_package_dir
|
||||
from ..Compiler import Options
|
||||
|
||||
try:
    import multiprocessing
    # Oversubscribe the CPUs a little: compile jobs are partly I/O bound.
    parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
except ImportError:
    # No multiprocessing available: fall back to serial compilation.
    multiprocessing = None
    parallel_compiles = 0
|
||||
|
||||
|
||||
class _FakePool(object):
|
||||
def map_async(self, func, args):
|
||||
try:
|
||||
from itertools import imap
|
||||
except ImportError:
|
||||
imap=map
|
||||
for _ in imap(func, args):
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def terminate(self):
|
||||
pass
|
||||
|
||||
def join(self):
|
||||
pass
|
||||
|
||||
|
||||
def parse_directives(option, name, value, parser):
    """optparse callback: merge a 'NAME=VALUE,...' list of compiler
    directives into the destination dict, starting from the directive
    defaults (or the previously accumulated value)."""
    dest = option.dest
    old_directives = dict(getattr(parser.values, dest,
                                  Options.get_directive_defaults()))
    directives = Options.parse_directive_list(
        value, relaxed_bool=True, current_settings=old_directives)
    setattr(parser.values, dest, directives)
|
||||
|
||||
|
||||
def parse_options(option, name, value, parser):
    """optparse callback: merge 'NAME=VALUE,...' pairs into the options dict.

    A bare NAME counts as True; the values 'false', 'f', '0' and 'no'
    (case-insensitive) map to False, anything else to True.
    """
    dest = option.dest
    merged = dict(getattr(parser.values, dest, {}))
    for item in value.split(','):
        key, sep, raw = item.partition('=')
        if sep:
            merged[key] = raw.lower() not in ('false', 'f', '0', 'no')
        else:
            merged[key] = True
    setattr(parser.values, dest, merged)
|
||||
|
||||
|
||||
def parse_compile_time_env(option, name, value, parser):
    """optparse callback: merge 'NAME=VALUE,...' compile-time environment
    variables into the destination dict (keeping earlier values as defaults)."""
    dest = option.dest
    old_env = dict(getattr(parser.values, dest, {}))
    new_env = Options.parse_compile_time_env(value, current_settings=old_env)
    setattr(parser.values, dest, new_env)
|
||||
|
||||
|
||||
def find_package_base(path):
    """Walk up from *path* past package directories (those with __init__.py).

    Returns (base_dir, package_path): the first non-package ancestor and
    the '/'-joined path of *path* relative to it.
    """
    base_dir, package_path = os.path.split(path)
    while os.path.isfile(os.path.join(base_dir, '__init__.py')):
        base_dir, parent = os.path.split(base_dir)
        package_path = parent + '/' + package_path
    return base_dir, package_path
|
||||
|
||||
|
||||
def cython_compile(path_pattern, options):
    """Cythonize (and optionally build) every file matching *path_pattern*.

    Builds run either serially or through a multiprocessing pool, depending
    on options.parallel; the pool is terminated on error and drained on success.
    """
    pool = None
    all_paths = map(os.path.abspath, extended_iglob(path_pattern))
    try:
        for path in all_paths:
            if options.build_inplace:
                # Walk up to the first existing directory that is not a
                # package, so distutils builds relative to the right root.
                base_dir = path
                while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                    base_dir = os.path.dirname(base_dir)
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.{py,pyx}')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            ext_modules = cythonize(
                paths,
                nthreads=options.parallel,
                exclude_failures=options.keep_going,
                exclude=options.excludes,
                compiler_directives=options.directives,
                compile_time_env=options.compile_time_env,
                force=options.force,
                quiet=options.quiet,
                depfile=options.depfile,
                **options.options)

            if ext_modules and options.build:
                if len(ext_modules) > 1 and options.parallel > 1:
                    # Lazily create one pool for the whole run; fall back to
                    # a serial fake pool when processes cannot be spawned.
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        # Any failure (including KeyboardInterrupt): stop workers, re-raise.
        if pool is not None:
            pool.terminate()
        raise
    else:
        # Success: wait for all queued builds to finish.
        if pool is not None:
            pool.close()
            pool.join()
|
||||
|
||||
|
||||
def run_distutils(args):
    """Build the given extension modules in place with distutils.

    *args* is a (base_dir, ext_modules) tuple so the function can be
    dispatched through a multiprocessing pool's map_async().
    Temporarily chdirs into base_dir and cleans up the temp build dir.
    """
    base_dir, ext_modules = args
    script_args = ['build_ext', '-i']
    cwd = os.getcwd()
    temp_dir = None
    try:
        if base_dir:
            os.chdir(base_dir)
            temp_dir = tempfile.mkdtemp(dir=base_dir)
            script_args.extend(['--build-temp', temp_dir])
        setup(
            script_name='setup.py',
            script_args=script_args,
            ext_modules=ext_modules,
        )
    finally:
        if base_dir:
            # Restore the working directory and remove the temp build tree.
            os.chdir(cwd)
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
def parse_args(args):
    """Parse the cythonize command line.

    Returns (options, source_args). Calls parser.error() (which exits)
    when no sources are given. '-i' implies '-b', parallelism is disabled
    when multiprocessing is unavailable, and a chosen language_level is
    copied into the cythonize() keyword options.
    """
    from optparse import OptionParser
    parser = OptionParser(usage='%prog [options] [sources and packages]+')

    parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
                      dest='directives', default={}, type="str",
                      action='callback', callback=parse_directives,
                      help='set a compiler directive')
    parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                      dest='compile_time_env', default={}, type="str",
                      action='callback', callback=parse_compile_time_env,
                      help='set a compile time environment variable')
    parser.add_option('-s', '--option', metavar='NAME=VALUE',
                      dest='options', default={}, type="str",
                      action='callback', callback=parse_options,
                      help='set a cythonize option')
    parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None,
                      help='use Python 2 syntax mode by default')
    parser.add_option('-3', dest='language_level', action='store_const', const=3,
                      help='use Python 3 syntax mode by default')
    parser.add_option('--3str', dest='language_level', action='store_const', const='3str',
                      help='use Python 3 syntax mode by default')
    parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
                      help='generate annotated HTML page for source files')

    parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes',
                      action='append', default=[],
                      help='exclude certain file patterns from the compilation')

    parser.add_option('-b', '--build', dest='build', action='store_true',
                      help='build extension modules using distutils')
    parser.add_option('-i', '--inplace', dest='build_inplace', action='store_true',
                      help='build extension modules in place using distutils (implies -b)')
    # BUG FIX: '%' binds tighter than 'or', so the original
    #   '... (default: %d)' % parallel_compiles or 1
    # always displayed the raw parallel_compiles value (even 0) and the
    # intended fallback to 1 never applied. Parenthesize the fallback.
    parser.add_option('-j', '--parallel', dest='parallel', metavar='N',
                      type=int, default=parallel_compiles,
                      help=('run builds in N parallel jobs (default: %d)' %
                            (parallel_compiles or 1)))
    parser.add_option('-f', '--force', dest='force', action='store_true',
                      help='force recompilation')
    parser.add_option('-q', '--quiet', dest='quiet', action='store_true',
                      help='be less verbose during compilation')

    parser.add_option('--lenient', dest='lenient', action='store_true',
                      help='increase Python compatibility by ignoring some compile time errors')
    parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true',
                      help='compile as much as possible, ignore compilation failures')
    parser.add_option('-M', '--depfile', action='store_true', help='produce depfiles for the sources')

    options, args = parser.parse_args(args)
    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        options.build = True
    if multiprocessing is None:
        # No multiprocessing module: force serial compilation.
        options.parallel = 0
    if options.language_level:
        assert options.language_level in (2, 3, '3str')
        options.options['language_level'] = options.language_level
    return options, args
|
||||
|
||||
|
||||
def main(args=None):
    """Command line entry point: parse options and cythonize each path."""
    options, paths = parse_args(args)

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    if options.annotate:
        Options.annotate = True

    for path in paths:
        cython_compile(path, options)


if __name__ == '__main__':
    main()
|
1293
kivy_venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py
Normal file
1293
kivy_venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1 @@
|
||||
from Cython.Distutils.build_ext import build_ext
|
376
kivy_venv/lib/python3.11/site-packages/Cython/Build/Inline.py
Normal file
376
kivy_venv/lib/python3.11/site-packages/Cython/Build/Inline.py
Normal file
@ -0,0 +1,376 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from distutils.core import Distribution, Extension
|
||||
from distutils.command.build_ext import build_ext
|
||||
|
||||
import Cython
|
||||
from ..Compiler.Main import Context, default_options
|
||||
|
||||
from ..Compiler.Visitor import CythonTransform, EnvTransform
|
||||
from ..Compiler.ParseTreeTransforms import SkipDeclarations
|
||||
from ..Compiler.TreeFragment import parse_from_strings
|
||||
from ..Compiler.StringEncoding import _unicode
|
||||
from .Dependencies import strip_string_literals, cythonize, cached_function
|
||||
from ..Compiler import Pipeline
|
||||
from ..Utils import get_cython_cache_dir
|
||||
import cython as cython_module
|
||||
|
||||
|
||||
IS_PY3 = sys.version_info >= (3,)

# A utility function to convert user-supplied ASCII strings to unicode.
if not IS_PY3:
    def to_unicode(s):
        # Py2: only byte strings need decoding; unicode passes through.
        if isinstance(s, bytes):
            return s.decode('ascii')
        else:
            return s
else:
    # Py3: str is already unicode, so this is the identity.
    to_unicode = lambda x: x
|
||||
|
||||
if sys.version_info < (3, 5):
    import imp
    def load_dynamic(name, module_path):
        # Legacy loader for Pythons that predate importlib.util.
        return imp.load_dynamic(name, module_path)
else:
    import importlib.util as _importlib_util
    def load_dynamic(name, module_path):
        """Load the extension module at *module_path* as *name*.

        Deliberately does NOT register the module in sys.modules
        (see the commented-out line below).
        """
        spec = _importlib_util.spec_from_file_location(name, module_path)
        module = _importlib_util.module_from_spec(spec)
        # sys.modules[name] = module
        spec.loader.exec_module(module)
        return module
||||
|
||||
class UnboundSymbols(EnvTransform, SkipDeclarations):
    """Tree visitor collecting names that resolve in no enclosing scope."""
    def __init__(self):
        # Pass context=None; the scope stack is managed by EnvTransform.
        CythonTransform.__init__(self, None)
        self.unbound = set()
    def visit_NameNode(self, node):
        # A name the current environment cannot look up is unbound.
        if not self.current_env().lookup(node.name):
            self.unbound.add(node.name)
        return node
    def __call__(self, node):
        # Unusual: returns the set of unbound names, not the transformed tree.
        super(UnboundSymbols, self).__call__(node)
        return self.unbound
|
||||
|
||||
|
||||
@cached_function
def unbound_symbols(code, context=None):
    """Return the names *code* uses that are neither locally bound nor
    Python builtins -- i.e. the values cython_inline() must supply."""
    code = to_unicode(code)
    if context is None:
        context = Context([], default_options)
    from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    # Run the compiler pipeline only far enough to analyse declarations.
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is None:
            continue
        tree = phase(tree)
        if isinstance(phase, AnalyseDeclarationsTransform):
            break
    try:
        import builtins
    except ImportError:
        # Python 2 name for the builtins module.
        import __builtin__ as builtins
    return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
|
||||
|
||||
|
||||
def unsafe_type(arg, context=None):
    """Like safe_type(), but maps Python ints to C 'long'.

    'Unsafe' because a C long may overflow where a Python int would not.
    """
    if type(arg) is int:
        return 'long'
    return safe_type(arg, context)
|
||||
|
||||
|
||||
def safe_type(arg, context=None):
    """Map *arg*'s concrete Python type to a Cython type declaration string.

    Builtin containers keep their own names, numeric scalars map to C-level
    types, numpy arrays get a buffer declaration, and anything else falls
    back to a (possibly context-resolved) extension type or 'object'.
    """
    py_type = type(arg)
    if py_type in (list, tuple, dict, str):
        return py_type.__name__
    scalar_names = {complex: 'double complex', float: 'double', bool: 'bint'}
    if py_type in scalar_names:
        return scalar_names[py_type]
    if 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
        return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
    # Walk the MRO: stop at the first builtin base, or report any base that
    # the compilation context knows as a declared extension type.
    for base_type in py_type.__mro__:
        if base_type.__module__ in ('__builtin__', 'builtins'):
            return 'object'
        module = context.find_module(base_type.__module__, need_pxd=False)
        if module:
            entry = module.lookup(base_type.__name__)
            if entry.is_type:
                return '%s.%s' % (base_type.__module__, base_type.__name__)
    return 'object'
|
||||
|
||||
|
||||
def _get_build_extension():
    """Create a finalized distutils build_ext command object."""
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing
    # the configuration files
    config_files = dist.find_config_files()
    dist.parse_config_files(config_files)
    build_extension = build_ext(dist)
    build_extension.finalize_options()
    return build_extension
||||
|
||||
|
||||
@cached_function
def _create_context(cython_include_dirs):
    # One compiler Context per distinct include-dir tuple; cached_function
    # memoises, which is why the argument must be a (hashable) tuple.
    return Context(list(cython_include_dirs), default_options)


# Maps code -> unbound symbol tuple, and (code, arg_sigs, key_hash) ->
# compiled __invoke callable (the fast path of cython_inline).
_cython_inline_cache = {}
_cython_inline_default_context = _create_context(('.',))
|
||||
|
||||
|
||||
def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
|
||||
for symbol in unbound_symbols:
|
||||
if symbol not in kwds:
|
||||
if locals is None or globals is None:
|
||||
calling_frame = inspect.currentframe().f_back.f_back.f_back
|
||||
if locals is None:
|
||||
locals = calling_frame.f_locals
|
||||
if globals is None:
|
||||
globals = calling_frame.f_globals
|
||||
if symbol in locals:
|
||||
kwds[symbol] = locals[symbol]
|
||||
elif symbol in globals:
|
||||
kwds[symbol] = globals[symbol]
|
||||
else:
|
||||
print("Couldn't find %r" % symbol)
|
||||
|
||||
|
||||
def _inline_key(orig_code, arg_sigs, language_level):
    """Hash everything that influences the generated module: the source,
    the argument type signatures, the interpreter and the Cython version."""
    key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__
    return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
def cython_inline(code, get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=None, cython_compiler_directives=None,
                  force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
    """Compile *code* as a Cython function of its unbound symbols and run it.

    The snippet is wrapped into a generated '__invoke' function whose
    parameters are typed via *get_type*; the built module is cached on disk
    (keyed by code + argument signature + interpreter/Cython version) and
    in-process. Returns the locals() dict of the executed snippet.
    """
    if get_type is None:
        get_type = lambda x: 'object'
    ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context

    cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
    if language_level is None and 'language_level' not in cython_compiler_directives:
        language_level = '3str'
    if language_level is not None:
        cython_compiler_directives['language_level'] = language_level

    # Fast path if this has been called in this session.
    _unbound_symbols = _cython_inline_cache.get(code)
    if _unbound_symbols is not None:
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
        args = sorted(kwds.items())
        arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
        key_hash = _inline_key(code, arg_sigs, language_level)
        invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
        if invoke is not None:
            arg_list = [arg[1] for arg in args]
            return invoke(*arg_list)

    # Slow path: normalise the code and work out what it needs from the caller.
    orig_code = code
    code = to_unicode(code)
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")

    # Arguments whose value is the cython module itself become cimports.
    cimports = []
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    key_hash = _inline_key(orig_code, arg_sigs, language_level)
    module_name = "_cython_inline_" + key_hash

    if module_name in sys.modules:
        module = sys.modules[module_name]

    else:
        build_extension = None
        if cython_inline.so_ext is None:
            # Figure out and cache current extension suffix
            build_extension = _get_build_extension()
            cython_inline.so_ext = build_extension.get_ext_filename('')

        module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if force or not os.path.isfile(module_path):
            cflags = []
            c_include_dirs = []
            # NOTE: 'type' below shadows the builtin within this loop.
            qualified = re.compile(r'([.\w]+)[.]')
            for type, _ in arg_sigs:
                m = qualified.match(type)
                if m:
                    cimports.append('\ncimport %s' % m.groups()[0])
                    # one special case
                    if m.groups()[0] == 'numpy':
                        import numpy
                        c_include_dirs.append(numpy.get_include())
                        # cflags.append('-Wno-unused')
            module_body, func_body = extract_func_code(code)
            params = ', '.join(['%s %s' % a for a in arg_sigs])
            # Generated module: module-level code, cimports, then the typed
            # __invoke wrapper that returns the snippet's locals().
            module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
    return locals()
""" % {'cimports': '\n'.join(cimports),
       'module_body': module_body,
       'params': params,
       'func_body': func_body }
            # Re-insert the string literals stripped out earlier.
            for key, value in literals.items():
                module_code = module_code.replace(key, value)
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            fh = open(pyx_file, 'w')
            try:
                fh.write(module_code)
            finally:
                fh.close()
            extension = Extension(
                name = module_name,
                sources = [pyx_file],
                include_dirs = c_include_dirs,
                extra_compile_args = cflags)
            if build_extension is None:
                build_extension = _get_build_extension()
            build_extension.extensions = cythonize(
                [extension],
                include_path=cython_include_dirs or ['.'],
                compiler_directives=cython_compiler_directives,
                quiet=quiet)
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()

        module = load_dynamic(module_name, module_path)

    _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)
|
||||
|
||||
|
||||
# Cached suffix used by cython_inline above. None should get
# overridden with actual value upon the first cython_inline invocation
# (see the `if cython_inline.so_ext is None:` branch inside the function).
cython_inline.so_ext = None
|
||||
|
||||
# Matcher for the first non-space character of a line (None for blank lines).
_find_non_space = re.compile('[^ ]').search


def strip_common_indent(code):
    """Strip the largest common leading indentation from all code lines.

    Blank lines are ignored, and comment lines neither contribute to the
    common indentation nor get re-indented (mirroring the first pass).
    Returns the lines re-joined with ``'\\n'``.
    """
    lines = code.splitlines()

    # First pass: find the smallest indentation of any non-blank,
    # non-comment line.
    min_indent = None
    for line in lines:
        match = _find_non_space(line)
        if not match:
            continue  # blank
        indent = match.start()
        if line[indent] == '#':
            continue  # comment
        if min_indent is None or min_indent > indent:
            min_indent = indent

    if min_indent is None:
        # Only blank and/or comment lines -- nothing to strip.
        return '\n'.join(lines)

    # Second pass: strip that indentation from every code line.
    # Bug fix: the original tested `line[indent:indent+1] == '#'` with a
    # stale `indent` left over from the *first* loop, so comment detection
    # was unreliable; recompute the indent per line instead.
    for ix, line in enumerate(lines):
        match = _find_non_space(line)
        if not match:
            continue  # blank
        indent = match.start()
        if line[indent] == '#':
            continue  # comment: keep as-is
        lines[ix] = line[min_indent:]
    return '\n'.join(lines)
|
||||
|
||||
|
||||
# Recognises statements that must live at module scope in the generated
# .pyx file (cdef extern/class blocks, cimports, star-imports).
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')


def extract_func_code(code):
    """Split inline code into (module-level code, indented function body).

    Unindented lines matching `module_statement` start a module-level
    chunk; any other unindented line switches back to the function body.
    Indented lines stay with whichever chunk is currently active.
    """
    module_lines = []
    func_lines = []
    target = func_lines
    for raw_line in code.replace('\t', ' ').split('\n'):
        if not raw_line.startswith(' '):
            # An unindented line decides where the following block goes.
            target = module_lines if module_statement.match(raw_line) else func_lines
        target.append(raw_line)
    # The function body is re-indented by four spaces for wrapping in a def.
    return '\n'.join(module_lines), '    ' + '\n    '.join(func_lines)
|
||||
|
||||
|
||||
try:
    from inspect import getcallargs
except ImportError:
    # Python < 2.7 fallback: emulate inspect.getcallargs() by binding the
    # given positional/keyword arguments to `func`'s formal parameters.
    def getcallargs(func, *arg_values, **kwd_values):
        """Map call arguments to `func`'s parameter names.

        Returns a dict of parameter name -> value (including the *varargs
        tuple and **kwargs dict when declared) and raises TypeError on
        duplicate, unexpected or missing arguments, as a real call would.
        """
        all = {}
        args, varargs, kwds, defaults = inspect.getargspec(func)
        if varargs is not None:
            # Positional overflow beyond the named parameters goes to *varargs.
            all[varargs] = arg_values[len(args):]
        for name, value in zip(args, arg_values):
            all[name] = value
        for name, value in list(kwd_values.items()):
            if name in args:
                if name in all:
                    raise TypeError("Duplicate argument %s" % name)
                # pop() so only genuinely unexpected keywords remain below;
                # safe because we iterate over a list() copy of the items.
                all[name] = kwd_values.pop(name)
        if kwds is not None:
            # Leftover keywords are collected into the **kwargs bin.
            all[kwds] = kwd_values
        elif kwd_values:
            raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values))
        if defaults is None:
            defaults = ()
        # Defaults align with the *last* len(defaults) parameters.
        first_default = len(args) - len(defaults)
        for ix, name in enumerate(args):
            if name not in all:
                if ix >= first_default:
                    all[name] = defaults[ix - first_default]
                else:
                    raise TypeError("Missing argument: %s" % name)
        return all
|
||||
|
||||
|
||||
def get_body(source):
    """Return the executable body of a function's source text.

    For a ``lambda``, the expression after the ``:`` becomes a ``return``
    statement; for a ``def``, everything after the first ``:`` (the body)
    is returned unchanged.

    Note: only the first ``:`` is considered, so signatures containing a
    ``:`` before the body (annotations, dict defaults) are not supported.
    """
    ix = source.index(':')
    # Bug fix: the original compared source[:5] (five characters) against
    # the six-character string 'lambda', which could never be equal, so the
    # lambda branch was unreachable.
    if source.startswith('lambda'):
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]
|
||||
|
||||
|
||||
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
    """Wrap a Python function so that every call compiles and runs its body
    through cython_inline, using the function's own global namespace."""

    def __init__(self, f):
        self._f = f
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        # Bind the call arguments to parameter names, then hand the body and
        # the function's globals over to cython_inline.
        bound = getcallargs(self._f, *args, **kwds)
        if IS_PY3:
            namespace = self._f.__globals__
        else:
            namespace = self._f.func_globals
        return cython_inline(self._body, locals=namespace, globals=namespace, **bound)
|
@ -0,0 +1,565 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
=====================
|
||||
Cython related magics
|
||||
=====================
|
||||
|
||||
Magic command interface for interactive work with Cython
|
||||
|
||||
.. note::
|
||||
|
||||
The ``Cython`` package needs to be installed separately. It
|
||||
can be obtained using ``easy_install`` or ``pip``.
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
To enable the magics below, execute ``%load_ext cython``.
|
||||
|
||||
``%%cython``
|
||||
|
||||
{CYTHON_DOC}
|
||||
|
||||
``%%cython_inline``
|
||||
|
||||
{CYTHON_INLINE_DOC}
|
||||
|
||||
``%%cython_pyximport``
|
||||
|
||||
{CYTHON_PYXIMPORT_DOC}
|
||||
|
||||
Author:
|
||||
* Brian Granger
|
||||
|
||||
Code moved from IPython and adapted by:
|
||||
* Martín Gaitán
|
||||
|
||||
Parts of this code were taken from Cython.inline.
|
||||
"""
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2010-2011, IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file ipython-COPYING.rst, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import imp
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import copy
|
||||
import distutils.log
|
||||
import textwrap
|
||||
|
||||
IO_ENCODING = sys.getfilesystemencoding()
|
||||
IS_PY2 = sys.version_info[0] < 3
|
||||
|
||||
try:
|
||||
reload
|
||||
except NameError: # Python 3
|
||||
from imp import reload
|
||||
|
||||
try:
|
||||
import hashlib
|
||||
except ImportError:
|
||||
import md5 as hashlib
|
||||
|
||||
from distutils.core import Distribution, Extension
|
||||
from distutils.command.build_ext import build_ext
|
||||
|
||||
from IPython.core import display
|
||||
from IPython.core import magic_arguments
|
||||
from IPython.core.magic import Magics, magics_class, cell_magic
|
||||
try:
|
||||
from IPython.paths import get_ipython_cache_dir
|
||||
except ImportError:
|
||||
# older IPython version
|
||||
from IPython.utils.path import get_ipython_cache_dir
|
||||
from IPython.utils.text import dedent
|
||||
|
||||
from ..Shadow import __version__ as cython_version
|
||||
from ..Compiler.Errors import CompileError
|
||||
from .Inline import cython_inline
|
||||
from .Dependencies import cythonize
|
||||
|
||||
|
||||
# Compiler flags for profile guided optimisation (PGO), keyed by C compiler
# family: 'gen' flags instrument the build to collect a runtime profile,
# 'use' flags recompile using the collected profile.
PGO_CONFIG = {
    'gcc': {
        'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
        'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
    },
    # blind copy from 'configure' script in CPython 3.7
    'icc': {
        'gen': ['-prof-gen'],
        'use': ['-prof-use'],
    }
}
# MinGW is GCC-based, so it takes the same flags.
PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
|
||||
|
||||
|
||||
if IS_PY2:
    def encode_fs(name):
        # Py2: distutils expects byte-string paths; encode text paths with
        # the filesystem encoding, pass bytes through unchanged.
        return name if isinstance(name, bytes) else name.encode(IO_ENCODING)
else:
    def encode_fs(name):
        # Py3: distutils handles str paths natively; no conversion needed.
        return name
|
||||
|
||||
|
||||
@magics_class
class CythonMagics(Magics):
    # IPython cell magics for compiling and running Cython code from an
    # interactive session: %%cython, %%cython_inline, %%cython_pyximport.

    def __init__(self, shell):
        super(CythonMagics, self).__init__(shell)
        self._reloads = {}      # module name -> previously imported module (%%cython_pyximport)
        self._code_cache = {}   # cache key -> generated module name (%%cython)
        self._pyximport_installed = False  # install pyximport lazily, only once

    def _import_all(self, module):
        # Inject the module's public names into the interactive namespace,
        # honouring __all__ when the module defines it.
        mdict = module.__dict__
        if '__all__' in mdict:
            keys = mdict['__all__']
        else:
            keys = [k for k in mdict if not k.startswith('_')]

        for k in keys:
            try:
                self.shell.push({k: mdict[k]})
            except KeyError:
                msg = "'module' object has no attribute '%s'" % k
                raise AttributeError(msg)

    @cell_magic
    def cython_inline(self, line, cell):
        """Compile and run a Cython code cell using Cython.inline.

        This magic simply passes the body of the cell to Cython.inline
        and returns the result. If the variables `a` and `b` are defined
        in the user's namespace, here is a simple example that returns
        their sum::

            %%cython_inline
            return a+b

        For most purposes, we recommend the usage of the `%%cython` magic.
        """
        # NOTE(review): the names look swapped here -- `locs` is bound to the
        # *global* namespace and `globs` to the *local* one; confirm against
        # Cython.Build.Inline.cython_inline's expectations.
        locs = self.shell.user_global_ns
        globs = self.shell.user_ns
        return cython_inline(cell, locals=locs, globals=globs)

    @cell_magic
    def cython_pyximport(self, line, cell):
        """Compile and import a Cython code cell using pyximport.

        The contents of the cell are written to a `.pyx` file in the current
        working directory, which is then imported using `pyximport`. This
        magic requires a module name to be passed::

            %%cython_pyximport modulename
            def f(x):
                return 2.0*x

        The compiled module is then imported and all of its symbols are
        injected into the user's namespace. For most purposes, we recommend
        the usage of the `%%cython` magic.
        """
        module_name = line.strip()
        if not module_name:
            raise ValueError('module name must be given')
        fname = module_name + '.pyx'
        with io.open(fname, 'w', encoding='utf-8') as f:
            f.write(cell)
        if 'pyximport' not in sys.modules or not self._pyximport_installed:
            import pyximport
            pyximport.install()
            self._pyximport_installed = True
        if module_name in self._reloads:
            module = self._reloads[module_name]
            # Note: reloading extension modules is not actually supported
            # (requires PEP-489 reinitialisation support).
            # Don't know why this should ever have worked as it reads here.
            # All we really need to do is to update the globals below.
            #reload(module)
        else:
            __import__(module_name)
            module = sys.modules[module_name]
            self._reloads[module_name] = module
        # Push the (possibly stale) module's symbols into the user namespace.
        self._import_all(module)

    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        '-a', '--annotate', action='store_true', default=False,
        help="Produce a colorized HTML version of the source."
    )
    @magic_arguments.argument(
        '-+', '--cplus', action='store_true', default=False,
        help="Output a C++ rather than C file."
    )
    @magic_arguments.argument(
        '-3', dest='language_level', action='store_const', const=3, default=None,
        help="Select Python 3 syntax."
    )
    @magic_arguments.argument(
        '-2', dest='language_level', action='store_const', const=2, default=None,
        help="Select Python 2 syntax."
    )
    @magic_arguments.argument(
        '-f', '--force', action='store_true', default=False,
        help="Force the compilation of a new module, even if the source has been "
             "previously compiled."
    )
    @magic_arguments.argument(
        '-c', '--compile-args', action='append', default=[],
        help="Extra flags to pass to compiler via the `extra_compile_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '--link-args', action='append', default=[],
        help="Extra flags to pass to linker via the `extra_link_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '-l', '--lib', action='append', default=[],
        help="Add a library to link the extension against (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-n', '--name',
        help="Specify a name for the Cython module."
    )
    @magic_arguments.argument(
        '-L', dest='library_dirs', metavar='dir', action='append', default=[],
        help="Add a path to the list of library directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-I', '--include', action='append', default=[],
        help="Add a path to the list of include directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-S', '--src', action='append', default=[],
        help="Add a path to the list of src files (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '--pgo', dest='pgo', action='store_true', default=False,
        help=("Enable profile guided optimisation in the C compiler. "
              "Compiles the cell twice and executes it in between to generate a runtime profile.")
    )
    @magic_arguments.argument(
        '--verbose', dest='quiet', action='store_false', default=True,
        help=("Print debug information like generated .c/.cpp file location "
              "and exact gcc/g++ command invoked.")
    )
    @cell_magic
    def cython(self, line, cell):
        """Compile and import everything from a Cython code cell.

        The contents of the cell are written to a `.pyx` file in the
        directory `IPYTHONDIR/cython` using a filename with the hash of the
        code. This file is then cythonized and compiled. The resulting module
        is imported and all of its symbols are injected into the user's
        namespace. The usage is similar to that of `%%cython_pyximport` but
        you don't have to pass a module name::

            %%cython
            def f(x):
                return 2.0*x

        To compile OpenMP codes, pass the required `--compile-args`
        and `--link-args`. For example with gcc::

            %%cython --compile-args=-fopenmp --link-args=-fopenmp
            ...

        To enable profile guided optimisation, pass the ``--pgo`` option.
        Note that the cell itself needs to take care of establishing a suitable
        profile when executed. This can be done by implementing the functions to
        optimise, and then calling them directly in the same cell on some realistic
        training data like this::

            %%cython --pgo
            def critical_function(data):
                for item in data:
                    ...

            # execute function several times to build profile
            from somewhere import some_typical_data
            for _ in range(100):
                critical_function(some_typical_data)

        In Python 3.5 and later, you can distinguish between the profile and
        non-profile runs as follows::

            if "_pgo_" in __name__:
                ... # execute critical code here
        """
        args = magic_arguments.parse_argstring(self.cython, line)
        code = cell if cell.endswith('\n') else cell + '\n'
        lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
        # Everything that can change the generated code goes into the key.
        key = (code, line, sys.version_info, sys.executable, cython_version)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)

        if args.pgo:
            key += ('pgo',)
        if args.force:
            # Force a new module name by adding the current time to the
            # key which is hashed to determine the module name.
            key += (time.time(),)

        if args.name:
            module_name = str(args.name)  # no-op in Py3
        else:
            module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest()
        html_file = os.path.join(lib_dir, module_name + '.html')
        module_path = os.path.join(lib_dir, module_name + self.so_ext)

        have_module = os.path.isfile(module_path)
        # PGO always needs a fresh cythonize run (two-phase build).
        need_cythonize = args.pgo or not have_module

        if args.annotate:
            if not os.path.isfile(html_file):
                need_cythonize = True

        extension = None
        if need_cythonize:
            extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
            if extensions is None:
                # Compilation failed and printed error message
                return None
            assert len(extensions) == 1
            extension = extensions[0]
            self._code_cache[key] = module_name

        if args.pgo:
            # First build + run to collect the profile; 'use' step follows.
            self._profile_pgo_wrapper(extension, lib_dir)

        try:
            self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None,
                                  quiet=args.quiet)
        except distutils.errors.CompileError:
            # Build failed and printed error message
            return None

        module = imp.load_dynamic(module_name, module_path)
        self._import_all(module)

        if args.annotate:
            try:
                with io.open(html_file, encoding='utf-8') as f:
                    annotated_html = f.read()
            except IOError as e:
                # File could not be opened. Most likely the user has a version
                # of Cython before 0.15.1 (when `cythonize` learned the
                # `force` keyword argument) and has already compiled this
                # exact source without annotation.
                print('Cython completed successfully but the annotated '
                      'source could not be read.', file=sys.stderr)
                print(e, file=sys.stderr)
            else:
                return display.HTML(self.clean_annotated_html(annotated_html))

    def _profile_pgo_wrapper(self, extension, lib_dir):
        """
        Generate a .c file for a separate extension module that calls the
        module init function of the original module. This makes sure that the
        PGO profiler sees the correct .o file of the final module, but it still
        allows us to import the module under a different name for profiling,
        before recompiling it into the PGO optimised module. Overwriting and
        reimporting the same shared library is not portable.
        """
        extension = copy.copy(extension)  # shallow copy, do not modify sources in place!
        module_name = extension.name
        pgo_module_name = '_pgo_' + module_name
        pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
        with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
            f.write(textwrap.dedent(u"""
            #include "Python.h"
            #if PY_MAJOR_VERSION < 3
            extern PyMODINIT_FUNC init%(module_name)s(void);
            PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC init%(pgo_module_name)s(void) {
                PyObject *sys_modules;
                init%(module_name)s(); if (PyErr_Occurred()) return;
                sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */
                if (sys_modules) {
                    PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return;
                    PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
                    Py_DECREF(module);
                }
            }
            #else
            extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
                return PyInit_%(module_name)s();
            }
            #endif
            """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))

        extension.sources = extension.sources + [pgo_wrapper_c_file]  # do not modify in place!
        extension.name = pgo_module_name

        self._build_extension(extension, lib_dir, pgo_step_name='gen')

        # import and execute module code to generate profile
        so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
        imp.load_dynamic(pgo_module_name, so_module_path)

    def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
        # Write the cell source to a .pyx file and run cythonize() on it.
        # Returns the list of Extension objects, or None on compile error.
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        pyx_file = encode_fs(pyx_file)

        c_include_dirs = args.include
        c_src_files = list(map(str, args.src))
        if 'numpy' in code:
            # Heuristic: a cell mentioning numpy probably cimports it.
            import numpy
            c_include_dirs.append(numpy.get_include())
        with io.open(pyx_file, 'w', encoding='utf-8') as f:
            f.write(code)
        extension = Extension(
            name=module_name,
            sources=[pyx_file] + c_src_files,
            include_dirs=c_include_dirs,
            library_dirs=args.library_dirs,
            extra_compile_args=args.compile_args,
            extra_link_args=args.link_args,
            libraries=args.lib,
            language='c++' if args.cplus else 'c',
        )
        try:
            opts = dict(
                quiet=quiet,
                annotate=args.annotate,
                force=True,
            )
            if args.language_level is not None:
                assert args.language_level in (2, 3)
                opts['language_level'] = args.language_level
            elif sys.version_info[0] >= 3:
                opts['language_level'] = 3
            return cythonize([extension], **opts)
        except CompileError:
            return None

    def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
        # Run the distutils build_ext command for a single extension,
        # temporarily raising the distutils log level in verbose mode.
        build_extension = self._get_build_extension(
            extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
        old_threshold = None
        try:
            if not quiet:
                old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
            build_extension.run()
        finally:
            if not quiet and old_threshold is not None:
                distutils.log.set_threshold(old_threshold)

    def _add_pgo_flags(self, build_extension, step_name, temp_dir):
        # Append the PGO 'gen'/'use' flags for the detected compiler family
        # to every extension; returns the original flag lists for reference.
        compiler_type = build_extension.compiler.compiler_type
        if compiler_type == 'unix':
            # Try to refine the generic 'unix' type from the command name.
            compiler_cmd = build_extension.compiler.compiler_so
            # TODO: we could try to call "[cmd] --version" for better insights
            if not compiler_cmd:
                pass
            elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
                compiler_type = 'clang'
            elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
                compiler_type = 'icc'
            elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
                compiler_type = 'gcc'
            elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
                compiler_type = 'gcc'
        config = PGO_CONFIG.get(compiler_type)
        orig_flags = []
        if config and step_name in config:
            flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
            for extension in build_extension.extensions:
                orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
                # Build new lists rather than mutating the shared ones.
                extension.extra_compile_args = extension.extra_compile_args + flags
                extension.extra_link_args = extension.extra_link_args + flags
        else:
            print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
                  file=sys.stderr)
        return orig_flags

    @property
    def so_ext(self):
        """The extension suffix for compiled modules."""
        try:
            return self._so_ext
        except AttributeError:
            # Compute once via a throwaway build_ext command and cache it.
            self._so_ext = self._get_build_extension().get_ext_filename('')
            return self._so_ext

    def _clear_distutils_mkpath_cache(self):
        """clear distutils mkpath cache

        prevents distutils from skipping re-creation of dirs that have been removed
        """
        try:
            from distutils.dir_util import _path_created
        except ImportError:
            pass
        else:
            _path_created.clear()

    def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
                             pgo_step_name=None, _build_ext=build_ext):
        # Create a configured distutils build_ext command. When a PGO step
        # is requested, a subclass injects the PGO flags just before
        # building the extensions.
        self._clear_distutils_mkpath_cache()
        dist = Distribution()
        config_files = dist.find_config_files()
        try:
            # Ignore a setup.cfg from the current directory -- it belongs to
            # some unrelated project, not to the notebook cell being built.
            config_files.remove('setup.cfg')
        except ValueError:
            pass
        dist.parse_config_files(config_files)

        if not temp_dir:
            temp_dir = lib_dir
        add_pgo_flags = self._add_pgo_flags

        if pgo_step_name:
            base_build_ext = _build_ext
            class _build_ext(_build_ext):
                def build_extensions(self):
                    add_pgo_flags(self, pgo_step_name, temp_dir)
                    base_build_ext.build_extensions(self)

        build_extension = _build_ext(dist)
        build_extension.finalize_options()
        if temp_dir:
            temp_dir = encode_fs(temp_dir)
            build_extension.build_temp = temp_dir
        if lib_dir:
            lib_dir = encode_fs(lib_dir)
            build_extension.build_lib = lib_dir
        if extension is not None:
            build_extension.extensions = [extension]
        return build_extension

    @staticmethod
    def clean_annotated_html(html):
        """Clean up the annotated HTML source.

        Strips the link to the generated C or C++ file, which we do not
        present to the user.
        """
        r = re.compile('<p>Raw output: <a href="(.*)">(.*)</a>')
        html = '\n'.join(l for l in html.splitlines() if not r.match(l))
        return html
|
||||
|
||||
# Fill the {CYTHON_DOC}/{CYTHON_INLINE_DOC}/{CYTHON_PYXIMPORT_DOC}
# placeholders of the module docstring with the magics' own docstrings.
__doc__ = __doc__.format(
    # rST doesn't see the -+ flag as part of an option list, so we
    # hide it from the module-level docstring.
    CYTHON_DOC=dedent(CythonMagics.cython.__doc__\
        .replace('-+, --cplus', '--cplus ')),
    CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
    CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
)
|
@ -0,0 +1,106 @@
|
||||
import difflib
|
||||
import glob
|
||||
import gzip
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import Cython.Build.Dependencies
|
||||
import Cython.Utils
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
|
||||
class TestCyCache(CythonTest):
    """Tests for cythonize()'s compilation cache (the `cache` argument)."""

    def setUp(self):
        CythonTest.setUp(self)
        self.temp_dir = tempfile.mkdtemp(
            prefix='cycache-test',
            dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
        self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
        self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)

    def cache_files(self, file_glob):
        """Return the cache entries matching `file_glob`."""
        return glob.glob(os.path.join(self.cache_dir, file_glob))

    def fresh_cythonize(self, *args, **kwargs):
        """Run cythonize() with all in-process caches cleared first."""
        Cython.Utils.clear_function_caches()
        Cython.Build.Dependencies._dep_tree = None  # discard method caches
        Cython.Build.Dependencies.cythonize(*args, **kwargs)

    def _write_file(self, path, content):
        # Write and *close* the file deterministically. The original used
        # `open(path, 'w').write(content)`, which leaks the handle on
        # non-refcounting Pythons and can break the later os.unlink() on
        # Windows.
        with open(path, 'w') as f:
            f.write(content)

    def _read_file(self, path):
        # Read and close the file deterministically (see _write_file).
        with open(path) as f:
            return f.read()

    def test_cycache_switch(self):
        content1 = 'value = 1\n'
        content2 = 'value = 2\n'
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'

        self._write_file(a_pyx, content1)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.assertEqual(1, len(self.cache_files('a.c*')))
        a_contents1 = self._read_file(a_c)
        os.unlink(a_c)

        self._write_file(a_pyx, content2)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_contents2 = self._read_file(a_c)
        os.unlink(a_c)

        self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
        self.assertEqual(2, len(self.cache_files('a.c*')))

        # Switching back to the first content must be served from the cache.
        self._write_file(a_pyx, content1)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        self.assertEqual(2, len(self.cache_files('a.c*')))
        a_contents = self._read_file(a_c)
        self.assertEqual(
            a_contents, a_contents1,
            msg='\n'.join(list(difflib.unified_diff(
                a_contents.split('\n'), a_contents1.split('\n')))[:10]))

    def test_cycache_uses_cache(self):
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'
        self._write_file(a_pyx, 'pass')
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
        # Poison the cache entry so we can tell whether it gets used. The
        # `with` also guarantees the gzip payload is flushed on close -- the
        # original relied on garbage collection to close the GzipFile.
        with gzip.GzipFile(a_cache, 'wb') as gz:
            gz.write('fake stuff'.encode('ascii'))
        os.unlink(a_c)
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        a_contents = self._read_file(a_c)
        self.assertEqual(a_contents, 'fake stuff',
                         'Unexpected contents: %s...' % a_contents[:100])

    def test_multi_file_output(self):
        a_pyx = os.path.join(self.src_dir, 'a.pyx')
        a_c = a_pyx[:-4] + '.c'
        a_h = a_pyx[:-4] + '.h'
        a_api_h = a_pyx[:-4] + '_api.h'
        self._write_file(a_pyx, 'cdef public api int foo(int x): return x\n')
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        expected = [a_c, a_h, a_api_h]
        for output in expected:
            self.assertTrue(os.path.exists(output), output)
            os.unlink(output)
        # All output files must be restored from the cache.
        self.fresh_cythonize(a_pyx, cache=self.cache_dir)
        for output in expected:
            self.assertTrue(os.path.exists(output), output)

    def test_options_invalidation(self):
        hash_pyx = os.path.join(self.src_dir, 'options.pyx')
        hash_c = hash_pyx[:-len('.pyx')] + '.c'

        self._write_file(hash_pyx, 'pass')
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
        self.assertEqual(1, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        # An option that changes the generated code must invalidate the cache.
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
        self.assertEqual(2, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        # Options that do not affect the output must not create new entries.
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
        self.assertEqual(2, len(self.cache_files('options.c*')))

        os.unlink(hash_c)
        self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
        self.assertEqual(2, len(self.cache_files('options.c*')))
|
@ -0,0 +1,96 @@
|
||||
import os, tempfile
|
||||
from Cython.Shadow import inline
|
||||
from Cython.Build.Inline import safe_type
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
# Optional numpy support: the numpy-specific test below is only defined
# when numpy is importable.
try:
    import numpy
    has_numpy = True
except ImportError:
    # Bug fix: the original used a bare `except:`, which also swallows
    # KeyboardInterrupt/SystemExit; only a failed import should be ignored.
    has_numpy = False

# Default keyword arguments shared by all inline() test calls.
test_kwds = dict(force=True, quiet=True)

# Module-level value read by test_globals via inline()'s frame inspection.
global_value = 100
|
||||
|
||||
class TestInline(CythonTest):
    # End-to-end tests for Cython.Build.Inline.cython_inline() through the
    # Cython.Shadow.inline() entry point.

    def setUp(self):
        CythonTest.setUp(self)
        self.test_kwds = dict(test_kwds)
        # Prefer the shared test temp dir when the suite provides one.
        if os.path.isdir('TEST_TMP'):
            lib_dir = os.path.join('TEST_TMP','inline')
        else:
            lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
        self.test_kwds['lib_dir'] = lib_dir

    def test_simple(self):
        self.assertEqual(inline("return 1+2", **self.test_kwds), 3)

    def test_types(self):
        # Argument types are inferred from the passed Python values.
        self.assertEqual(inline("""
            cimport cython
            return cython.typeof(a), cython.typeof(b)
        """, a=1.0, b=[], **self.test_kwds), ('double', 'list object'))

    def test_locals(self):
        # inline() picks up `a` and `b` from the calling frame's locals.
        a = 1
        b = 2
        self.assertEqual(inline("return a+b", **self.test_kwds), 3)

    def test_globals(self):
        # inline() also resolves names from the caller's globals.
        self.assertEqual(inline("return global_value + 1", **self.test_kwds), global_value + 1)

    def test_no_return(self):
        # Without an explicit return, inline() returns the local namespace.
        self.assertEqual(inline("""
            a = 1
            cdef double b = 2
            cdef c = []
        """, **self.test_kwds), dict(a=1, b=2.0, c=[]))

    def test_def_node(self):
        foo = inline("def foo(x): return x * x", **self.test_kwds)['foo']
        self.assertEqual(foo(7), 49)

    def test_class_ref(self):
        # A class from the calling scope can be referenced by name.
        class Type(object):
            pass
        tp = inline("Type")['Type']
        self.assertEqual(tp, Type)

    def test_pure(self):
        # "Pure Python" mode: cython is available as `cy` inside the snippet.
        import cython as cy
        b = inline("""
        b = cy.declare(float, a)
        c = cy.declare(cy.pointer(cy.float), &b)
        return b
        """, a=3, **self.test_kwds)
        self.assertEqual(type(b), float)

    def test_compiler_directives(self):
        self.assertEqual(
            inline('return sum(x)',
                   x=[1, 2, 3],
                   cython_compiler_directives={'boundscheck': False}),
            6
        )

    def test_lang_version(self):
        # GH-3419. Caching for inline code didn't always respect compiler directives.
        # Py2 `/` is integer division, Py3 `/` is true division.
        inline_divcode = "def f(int a, int b): return a/b"
        self.assertEqual(
            inline(inline_divcode, language_level=2)['f'](5,2),
            2
        )
        self.assertEqual(
            inline(inline_divcode, language_level=3)['f'](5,2),
            2.5
        )

    if has_numpy:

        def test_numpy(self):
            import numpy
            a = numpy.ndarray((10, 20))
            a[0,0] = 10
            self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
            self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0)
|
@ -0,0 +1,205 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# tag: ipython
|
||||
|
||||
"""Tests for the Cython magics extension."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from Cython.Build import IpythonMagic
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
try:
    import IPython.testing.globalipapp
except ImportError:
    # Disable tests and fake helpers for initialisation below.
    # Returning None from the class decorator removes the test class
    # from the module namespace entirely.
    def skip_if_not_installed(_):
        return None
else:
    # IPython is available: the decorator is a no-op pass-through.
    def skip_if_not_installed(c):
        return c
||||
|
||||
try:
    # disable IPython history thread before it gets started to avoid having to clean it up
    from IPython.core.history import HistoryManager
    HistoryManager.enabled = False
except ImportError:
    # No IPython installed; the tests are skipped anyway (see above).
    pass
|
||||
|
||||
# Basic Cython snippet used by most magic tests: f() doubles its argument.
code = u"""\
def f(x):
    return 2*x
"""

# Snippet whose `/` semantics depend on the language level: integer
# division under Py2 semantics, true division under Py3 semantics.
cython3_code = u"""\
def f(int x):
    return 2 / x

def call(x):
    return f(*(x,))
"""

# Variant that also runs a small workload at module load, so the
# profile-guided-optimisation (--pgo) build has something to profile.
pgo_cython3_code = cython3_code + u"""\
def main():
    for _ in range(100): call(5)
main()
"""
|
||||
|
||||
|
||||
if sys.platform == 'win32':
    # not using IPython's decorators here because they depend on "nose"
    try:
        from unittest import skip as skip_win32
    except ImportError:
        # poor dev's silent @unittest.skip()
        def skip_win32(dummy):
            def _skip_win32(func):
                return None
            return _skip_win32
else:
    # Not on Windows: the decorator runs the wrapped test normally.
    # (The `dummy` argument mirrors unittest.skip's reason string.)
    def skip_win32(dummy):
        def _skip_win32(func):
            def wrapper(*args, **kwargs):
                func(*args, **kwargs)
            return wrapper
        return _skip_win32
|
||||
|
||||
|
||||
@skip_if_not_installed
class TestIPythonMagic(CythonTest):
    """Tests for the %%cython / %%cython_inline / %%cython_pyximport cell
    magics, run against a shared global IPython test instance."""

    @classmethod
    def setUpClass(cls):
        CythonTest.setUpClass()
        # One global in-process IPython app shared by all tests.
        cls._ip = IPython.testing.globalipapp.get_ipython()

    def setUp(self):
        CythonTest.setUp(self)
        # Loading is idempotent, so doing it per-test is safe.
        self._ip.extension_manager.load_extension('cython')

    def test_cython_inline(self):
        # %%cython_inline sees names from the interactive namespace.
        ip = self._ip
        ip.ex('a=10; b=20')
        result = ip.run_cell_magic('cython_inline', '', 'return a+b')
        self.assertEqual(result, 30)

    @skip_win32('Skip on Windows')
    def test_cython_pyximport(self):
        ip = self._ip
        module_name = '_test_cython_pyximport'
        ip.run_cell_magic('cython_pyximport', module_name, code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)
        # Running the same cell again must reload/replace the module.
        ip.run_cell_magic('cython_pyximport', module_name, code)
        ip.ex('h = f(-10)')
        self.assertEqual(ip.user_ns['h'], -20.0)
        # Best-effort cleanup of the generated .pyx file.
        try:
            os.remove(module_name + '.pyx')
        except OSError:
            pass

    def test_cython(self):
        ip = self._ip
        ip.run_cell_magic('cython', '', code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_name(self):
        # The Cython module named 'mymodule' defines the function f.
        ip = self._ip
        ip.run_cell_magic('cython', '--name=mymodule', code)
        # This module can now be imported in the interactive namespace.
        ip.ex('import mymodule; g = mymodule.f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_language_level(self):
        # The Cython cell defines the functions f() and call().
        # Without an explicit flag the magic follows the running Python's
        # division semantics.
        ip = self._ip
        ip.run_cell_magic('cython', '', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        if sys.version_info[0] < 3:
            self.assertEqual(ip.user_ns['g'], 2 // 10)
            self.assertEqual(ip.user_ns['h'], 2 // 10)
        else:
            self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
            self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    def test_cython3(self):
        # The Cython cell defines the functions f() and call().
        # -3 forces Py3 semantics: `/` is true division.
        ip = self._ip
        ip.run_cell_magic('cython', '-3', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    def test_cython2(self):
        # The Cython cell defines the functions f() and call().
        # -2 forces Py2 semantics: `/` on ints is floor division.
        ip = self._ip
        ip.run_cell_magic('cython', '-2', cython3_code)
        ip.ex('g = f(10); h = call(10)')
        self.assertEqual(ip.user_ns['g'], 2 // 10)
        self.assertEqual(ip.user_ns['h'], 2 // 10)

    @skip_win32('Skip on Windows')
    def test_cython3_pgo(self):
        # The Cython cell defines the functions f() and call().
        # --pgo triggers a profile+rebuild cycle; results must be unchanged.
        ip = self._ip
        ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
        ip.ex('g = f(10); h = call(10); main()')
        self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)

    @skip_win32('Skip on Windows')
    def test_extlibs(self):
        # -l links an external library (libm here) into the built module.
        ip = self._ip
        code = u"""
from libc.math cimport sin
x = sin(0.0)
        """
        ip.user_ns['x'] = 1
        ip.run_cell_magic('cython', '-l m', code)
        self.assertEqual(ip.user_ns['x'], 0)


    def test_cython_verbose(self):
        ip = self._ip
        ip.run_cell_magic('cython', '--verbose', code)
        ip.ex('g = f(10)')
        self.assertEqual(ip.user_ns['g'], 20.0)

    def test_cython_verbose_thresholds(self):
        @contextmanager
        def mock_distutils():
            # Stand-in for distutils.log that records every threshold
            # change instead of logging.  A fresh MockLog class (and thus
            # a fresh `thresholds` list) is created per context entry.
            class MockLog:
                DEBUG = 1
                INFO = 2
                thresholds = [INFO]

                def set_threshold(self, val):
                    self.thresholds.append(val)
                    # Mimic distutils.log.set_threshold: return the
                    # previously active threshold.
                    return self.thresholds[-2]


            new_log = MockLog()
            old_log = IpythonMagic.distutils.log
            try:
                IpythonMagic.distutils.log = new_log
                yield new_log
            finally:
                IpythonMagic.distutils.log = old_log

        ip = self._ip
        with mock_distutils() as verbose_log:
            ip.run_cell_magic('cython', '--verbose', code)
            ip.ex('g = f(10)')
            self.assertEqual(ip.user_ns['g'], 20.0)
            # --verbose drops the threshold to DEBUG and restores it after.
            self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
                             verbose_log.thresholds)

        with mock_distutils() as normal_log:
            # Without --verbose the threshold is never changed.
            ip.run_cell_magic('cython', '', code)
            ip.ex('g = f(10)')
            self.assertEqual(ip.user_ns['g'], 20.0)
            self.assertEqual([normal_log.INFO], normal_log.thresholds)
|
@ -0,0 +1,57 @@
|
||||
from Cython.Build.Dependencies import strip_string_literals
|
||||
|
||||
from Cython.TestUtils import CythonTest
|
||||
|
||||
class TestStripLiterals(CythonTest):
    """Tests for strip_string_literals(), which replaces string literals
    and comment bodies with numbered placeholder labels (prefix + index)."""

    def t(self, before, expected):
        """Strip `before`, compare with `expected`, then re-substitute the
        extracted literals and check the original text is reconstructed."""
        actual, literals = strip_string_literals(before, prefix="_L")
        self.assertEqual(expected, actual)
        for key, value in literals.items():
            actual = actual.replace(key, value)
        self.assertEqual(before, actual)

    def test_empty(self):
        self.t("", "")

    def test_single_quote(self):
        self.t("'x'", "'_L1_'")

    def test_double_quote(self):
        self.t('"x"', '"_L1_"')

    def test_nested_quotes(self):
        # Quote characters inside the other quoting style are literal text.
        self.t(""" '"' "'" """, """ '_L1_' "_L2_" """)

    def test_triple_quote(self):
        # Triple-quoted strings may span lines; the label replaces the body.
        self.t(" '''a\n''' ", " '''_L1_''' ")

    def test_backslash(self):
        # Escaped quotes and escaped backslashes must not end the literal.
        self.t(r"'a\'b'", "'_L1_'")
        self.t(r"'a\\'", "'_L1_'")
        self.t(r"'a\\\'b'", "'_L1_'")

    def test_unicode(self):
        # String prefixes stay outside the replaced literal body.
        self.t("u'abc'", "u'_L1_'")

    def test_raw(self):
        self.t(r"r'abc\\'", "r'_L1_'")

    def test_raw_unicode(self):
        self.t(r"ru'abc\\'", "ru'_L1_'")

    def test_comment(self):
        # Comment bodies are replaced too, keeping the '#' marker.
        self.t("abc # foo", "abc #_L1_")

    def test_comment_and_quote(self):
        # A quote inside a comment is comment text, and vice versa.
        self.t("abc # 'x'", "abc #_L1_")
        self.t("'abc#'", "'_L1_'")

    def test_include(self):
        self.t("include 'a.pxi' # something here",
               "include '_L1_' #_L2_")

    def test_extern(self):
        self.t("cdef extern from 'a.h': # comment",
               "cdef extern from '_L1_': #_L2_")
|
||||
|
@ -0,0 +1 @@
|
||||
# empty file
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,2 @@
|
||||
from .Dependencies import cythonize
|
||||
from .Distutils import build_ext
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
816
kivy_venv/lib/python3.11/site-packages/Cython/CodeWriter.py
Normal file
816
kivy_venv/lib/python3.11/site-packages/Cython/CodeWriter.py
Normal file
@ -0,0 +1,816 @@
|
||||
"""
|
||||
Serializes a Cython code tree to Cython code. This is primarily useful for
|
||||
debugging and testing purposes.
|
||||
|
||||
The output is in a strict format, no whitespace or comments from the input
|
||||
is preserved (and it could not be as it is not present in the code tree).
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .Compiler.Visitor import TreeVisitor
|
||||
from .Compiler.ExprNodes import *
|
||||
|
||||
|
||||
class LinesResult(object):
    """Accumulates serializer output as a list of finished lines plus the
    text of the line currently being written."""

    def __init__(self):
        self.lines = []   # completed lines, without trailing newlines
        self.s = u""      # partially written current line

    def put(self, s):
        """Append text to the current (unfinished) line."""
        self.s = self.s + s

    def newline(self):
        """Terminate the current line and start a fresh empty one."""
        self.lines.append(self.s)
        self.s = u""

    def putline(self, s):
        """Append text and terminate the line in a single step."""
        self.lines.append(self.s + s)
        self.s = u""
||||
|
||||
class DeclarationWriter(TreeVisitor):
    """Serializes declaration-level Cython nodes back to Cython source.

    Output accumulates in a LinesResult; `numindents` tracks the current
    indentation depth.  Statement/expression serialization is layered on
    top by the CodeWriter subclass; PxdWriter restricts output to
    declarations only.

    Fix: `visit_CArrayDeclaratorNode` was defined twice with identical
    bodies (the second silently shadowed the first); the duplicate has
    been removed.
    """

    indent_string = u"    "

    def __init__(self, result=None):
        super(DeclarationWriter, self).__init__()
        if result is None:
            result = LinesResult()
        self.result = result
        self.numindents = 0
        # temp-handle -> generated name; used by CodeWriter's
        # visit_TempsBlockNode / visit_TempRefNode.
        self.tempnames = {}
        self.tempblockindex = 0

    def write(self, tree):
        """Serialize `tree` and return the LinesResult holding the output."""
        self.visit(tree)
        return self.result

    def indent(self):
        self.numindents += 1

    def dedent(self):
        self.numindents -= 1

    def startline(self, s=u""):
        """Begin a new output line at the current indentation level."""
        self.result.put(self.indent_string * self.numindents + s)

    def put(self, s):
        """Append text to the current output line."""
        self.result.put(s)

    def putline(self, s):
        """Write a complete line at the current indentation level."""
        self.result.putline(self.indent_string * self.numindents + s)

    def endline(self, s=u""):
        """Append `s` (unindented) and terminate the current line."""
        self.result.putline(s)

    def line(self, s):
        """Write `s` as a complete, indented line."""
        self.startline(s)
        self.endline()

    def comma_separated_list(self, items, output_rhs=False):
        """Emit `items` separated by ", "; with output_rhs, also emit
        `= default` for items carrying a default value.

        NOTE(review): only items[:-1] get their default emitted — the last
        item's default is dropped.  Behavior preserved as-is; confirm
        against callers before changing.
        """
        if len(items) > 0:
            for item in items[:-1]:
                self.visit(item)
                if output_rhs and item.default is not None:
                    self.put(u" = ")
                    self.visit(item.default)
                self.put(u", ")
            self.visit(items[-1])

    def visit_Node(self, node):
        # Fail loudly on any node type without a dedicated handler.
        raise AssertionError("Node not handled by serializer: %r" % node)

    def visit_ModuleNode(self, node):
        self.visitchildren(node)

    def visit_StatListNode(self, node):
        self.visitchildren(node)

    def visit_CDefExternNode(self, node):
        # `cdef extern from *:` when no header file is named.
        if node.include_file is None:
            file = u'*'
        else:
            file = u'"%s"' % node.include_file
        self.putline(u"cdef extern from %s:" % file)
        self.indent()
        self.visit(node.body)
        self.dedent()

    def visit_CPtrDeclaratorNode(self, node):
        self.put('*')
        self.visit(node.base)

    def visit_CReferenceDeclaratorNode(self, node):
        self.put('&')
        self.visit(node.base)

    def visit_CArrayDeclaratorNode(self, node):
        # Array declarator: `base[dimension]`; dimension may be omitted.
        self.visit(node.base)
        self.put(u'[')
        if node.dimension is not None:
            self.visit(node.dimension)
        self.put(u']')

    def visit_CFuncDeclaratorNode(self, node):
        # TODO: except, gil, etc.
        self.visit(node.base)
        self.put(u'(')
        self.comma_separated_list(node.args)
        self.endline(u')')

    def visit_CNameDeclaratorNode(self, node):
        self.put(node.name)

    def visit_CSimpleBaseTypeNode(self, node):
        # See Parsing.p_sign_and_longness
        if node.is_basic_c_type:
            self.put(("unsigned ", "", "signed ")[node.signed])
            if node.longness < 0:
                self.put("short " * -node.longness)
            elif node.longness > 0:
                self.put("long " * node.longness)
        self.put(node.name)

    def visit_CComplexBaseTypeNode(self, node):
        self.put(u'(')
        self.visit(node.base_type)
        self.visit(node.declarator)
        self.put(u')')

    def visit_CNestedBaseTypeNode(self, node):
        # Dotted type reference: `base.name`.
        self.visit(node.base_type)
        self.put(u'.')
        self.put(node.name)

    def visit_TemplatedTypeNode(self, node):
        # Template/buffer syntax: `base[arg, ..., key=value, ...]`.
        self.visit(node.base_type_node)
        self.put(u'[')
        self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs)
        self.put(u']')

    def visit_CVarDefNode(self, node):
        self.startline(u"cdef ")
        self.visit(node.base_type)
        self.put(u" ")
        self.comma_separated_list(node.declarators, output_rhs=True)
        self.endline()

    def visit_container_node(self, node, decl, extras, attributes):
        """Shared emitter for struct/union/enum/cppclass bodies."""
        # TODO: visibility
        self.startline(decl)
        if node.name:
            self.put(u' ')
            self.put(node.name)
            if node.cname is not None:
                self.put(u' "%s"' % node.cname)
        if extras:
            self.put(extras)
        self.endline(':')
        self.indent()
        if not attributes:
            # An empty body still needs a statement.
            self.putline('pass')
        else:
            for attribute in attributes:
                self.visit(attribute)
        self.dedent()

    def visit_CStructOrUnionDefNode(self, node):
        if node.typedef_flag:
            decl = u'ctypedef '
        else:
            decl = u'cdef '
        if node.visibility == 'public':
            decl += u'public '
        if node.packed:
            decl += u'packed '
        decl += node.kind
        self.visit_container_node(node, decl, None, node.attributes)

    def visit_CppClassNode(self, node):
        extras = ""
        if node.templates:
            extras = u"[%s]" % ", ".join(node.templates)
        if node.base_classes:
            extras += "(%s)" % ", ".join(node.base_classes)
        self.visit_container_node(node, u"cdef cppclass", extras, node.attributes)

    def visit_CEnumDefNode(self, node):
        self.visit_container_node(node, u"cdef enum", None, node.items)

    def visit_CEnumDefItemNode(self, node):
        self.startline(node.name)
        if node.cname:
            self.put(u' "%s"' % node.cname)
        if node.value:
            self.put(u" = ")
            self.visit(node.value)
        self.endline()

    def visit_CClassDefNode(self, node):
        assert not node.module_name
        if node.decorators:
            for decorator in node.decorators:
                self.visit(decorator)
        self.startline(u"cdef class ")
        self.put(node.class_name)
        if node.base_class_name:
            self.put(u"(")
            if node.base_class_module:
                self.put(node.base_class_module)
                self.put(u".")
            self.put(node.base_class_name)
            self.put(u")")
        self.endline(u":")
        self.indent()
        self.visit(node.body)
        self.dedent()

    def visit_CTypeDefNode(self, node):
        self.startline(u"ctypedef ")
        self.visit(node.base_type)
        self.put(u" ")
        self.visit(node.declarator)
        self.endline()

    def visit_FuncDefNode(self, node):
        self.startline(u"def %s(" % node.name)
        self.comma_separated_list(node.args)
        self.endline(u"):")
        self.indent()
        self.visit(node.body)
        self.dedent()

    def visit_CArgDeclNode(self, node):
        # Optional type, then declarator, then optional default value.
        if node.base_type.name is not None:
            self.visit(node.base_type)
            self.put(u" ")
        self.visit(node.declarator)
        if node.default is not None:
            self.put(u" = ")
            self.visit(node.default)

    def visit_CImportStatNode(self, node):
        self.startline(u"cimport ")
        self.put(node.module_name)
        if node.as_name:
            self.put(u" as ")
            self.put(node.as_name)
        self.endline()

    def visit_FromCImportStatNode(self, node):
        self.startline(u"from ")
        self.put(node.module_name)
        self.put(u" cimport ")
        first = True
        for pos, name, as_name, kind in node.imported_names:
            assert kind is None
            if first:
                first = False
            else:
                self.put(u", ")
            self.put(name)
            if as_name:
                self.put(u" as ")
                self.put(as_name)
        self.endline()

    def visit_NameNode(self, node):
        self.put(node.name)

    def visit_IntNode(self, node):
        self.put(node.value)

    def visit_NoneNode(self, node):
        self.put(u"None")

    def visit_NotNode(self, node):
        # Always parenthesized to keep precedence unambiguous.
        self.put(u"(not ")
        self.visit(node.operand)
        self.put(u")")

    def visit_DecoratorNode(self, node):
        self.startline("@")
        self.visit(node.decorator)
        self.endline()

    def visit_BinopNode(self, node):
        self.visit(node.operand1)
        self.put(u" %s " % node.operator)
        self.visit(node.operand2)

    def visit_AttributeNode(self, node):
        self.visit(node.obj)
        self.put(u".%s" % node.attribute)

    def visit_BoolNode(self, node):
        self.put(str(node.value))

    # FIXME: represent string nodes correctly
    def visit_StringNode(self, node):
        value = node.value
        if value.encoding is not None:
            value = value.encode(value.encoding)
        self.put(repr(value))

    def visit_PassStatNode(self, node):
        self.startline(u"pass")
        self.endline()
|
||||
|
||||
class CodeWriter(DeclarationWriter):
    """Extends DeclarationWriter with statement and expression
    serialization, so full function bodies can be written back out."""

    def visit_SingleAssignmentNode(self, node):
        self.startline()
        self.visit(node.lhs)
        self.put(u" = ")
        self.visit(node.rhs)
        self.endline()

    def visit_CascadedAssignmentNode(self, node):
        # a = b = ... = rhs
        self.startline()
        for lhs in node.lhs_list:
            self.visit(lhs)
            self.put(u" = ")
        self.visit(node.rhs)
        self.endline()

    def visit_PrintStatNode(self, node):
        # Py2 print statement; a trailing comma suppresses the newline.
        self.startline(u"print ")
        self.comma_separated_list(node.arg_tuple.args)
        if not node.append_newline:
            self.put(u",")
        self.endline()

    def visit_ForInStatNode(self, node):
        self.startline(u"for ")
        self.visit(node.target)
        self.put(u" in ")
        self.visit(node.iterator.sequence)
        self.endline(u":")
        self.indent()
        self.visit(node.body)
        self.dedent()
        if node.else_clause is not None:
            self.line(u"else:")
            self.indent()
            self.visit(node.else_clause)
            self.dedent()

    def visit_IfStatNode(self, node):
        # The IfClauseNode is handled directly without a separate match
        # for clarity.
        self.startline(u"if ")
        self.visit(node.if_clauses[0].condition)
        self.endline(":")
        self.indent()
        self.visit(node.if_clauses[0].body)
        self.dedent()
        for clause in node.if_clauses[1:]:
            self.startline("elif ")
            self.visit(clause.condition)
            self.endline(":")
            self.indent()
            self.visit(clause.body)
            self.dedent()
        if node.else_clause is not None:
            self.line("else:")
            self.indent()
            self.visit(node.else_clause)
            self.dedent()

    def visit_SequenceNode(self, node):
        self.comma_separated_list(node.args)  # Might need to discover whether we need () around tuples...hmm...

    def visit_SimpleCallNode(self, node):
        self.visit(node.function)
        self.put(u"(")
        self.comma_separated_list(node.args)
        self.put(")")

    def visit_GeneralCallNode(self, node):
        # Call with keyword arguments (and possibly *args).
        self.visit(node.function)
        self.put(u"(")
        posarg = node.positional_args
        if isinstance(posarg, AsTupleNode):
            self.visit(posarg.arg)
        else:
            self.comma_separated_list(posarg.args)  # TupleNode.args
        if node.keyword_args:
            if isinstance(node.keyword_args, DictNode):
                for i, (name, value) in enumerate(node.keyword_args.key_value_pairs):
                    if i > 0:
                        self.put(', ')
                    self.visit(name)
                    self.put('=')
                    self.visit(value)
            else:
                raise Exception("Not implemented yet")
        self.put(u")")

    def visit_ExprStatNode(self, node):
        self.startline()
        self.visit(node.expr)
        self.endline()

    def visit_InPlaceAssignmentNode(self, node):
        # Augmented assignment, e.g. `lhs += rhs`.
        self.startline()
        self.visit(node.lhs)
        self.put(u" %s= " % node.operator)
        self.visit(node.rhs)
        self.endline()

    def visit_WithStatNode(self, node):
        self.startline()
        self.put(u"with ")
        self.visit(node.manager)
        if node.target is not None:
            self.put(u" as ")
            self.visit(node.target)
        self.endline(u":")
        self.indent()
        self.visit(node.body)
        self.dedent()

    def visit_TryFinallyStatNode(self, node):
        self.line(u"try:")
        self.indent()
        self.visit(node.body)
        self.dedent()
        self.line(u"finally:")
        self.indent()
        self.visit(node.finally_clause)
        self.dedent()

    def visit_TryExceptStatNode(self, node):
        self.line(u"try:")
        self.indent()
        self.visit(node.body)
        self.dedent()
        for x in node.except_clauses:
            self.visit(x)
        if node.else_clause is not None:
            self.visit(node.else_clause)

    def visit_ExceptClauseNode(self, node):
        # `except [pattern[, target]]:` — Py2-style exception target syntax.
        self.startline(u"except")
        if node.pattern is not None:
            self.put(u" ")
            self.visit(node.pattern)
        if node.target is not None:
            self.put(u", ")
            self.visit(node.target)
        self.endline(":")
        self.indent()
        self.visit(node.body)
        self.dedent()

    def visit_ReturnStatNode(self, node):
        self.startline("return ")
        self.visit(node.value)
        self.endline()

    def visit_ReraiseStatNode(self, node):
        # Bare `raise` re-raising the active exception.
        self.line("raise")

    def visit_ImportNode(self, node):
        # FIXME? Not real Cython syntax — emits a placeholder form.
        self.put(u"(import %s)" % node.module_name.value)

    def visit_TempsBlockNode(self, node):
        """
        Temporaries are output like $1_1', where the first number is
        an index of the TempsBlockNode and the second number is an index
        of the temporary which that block allocates.
        """
        idx = 0
        for handle in node.temps:
            self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx)
            idx += 1
        self.tempblockindex += 1
        self.visit(node.body)

    def visit_TempRefNode(self, node):
        # Emit the name generated for this temp in visit_TempsBlockNode.
        self.put(self.tempnames[node.handle])
|
||||
|
||||
|
||||
class PxdWriter(DeclarationWriter):
    """Serializes only the declaration parts of a tree, as for a .pxd file:
    function bodies and other statements are suppressed."""

    def __call__(self, node):
        # Print the serialized declarations and return the tree unchanged,
        # so instances can be used as a pipeline/transform stage.
        print(u'\n'.join(self.write(node).lines))
        return node

    def visit_CFuncDefNode(self, node):
        # Inline functions are omitted from .pxd output entirely.
        if 'inline' in node.modifiers:
            return
        if node.overridable:
            self.startline(u'cpdef ')
        else:
            self.startline(u'cdef ')
        if node.visibility != 'private':
            self.put(node.visibility)
            self.put(u' ')
        if node.api:
            self.put(u'api ')
        # Emit only the declarator (signature), not the body.
        self.visit(node.declarator)

    def visit_StatNode(self, node):
        # All plain statements are ignored in .pxd output.
        pass
|
||||
|
||||
|
||||
class ExpressionWriter(TreeVisitor):
|
||||
|
||||
def __init__(self, result=None):
|
||||
super(ExpressionWriter, self).__init__()
|
||||
if result is None:
|
||||
result = u""
|
||||
self.result = result
|
||||
self.precedence = [0]
|
||||
|
||||
def write(self, tree):
|
||||
self.visit(tree)
|
||||
return self.result
|
||||
|
||||
def put(self, s):
|
||||
self.result += s
|
||||
|
||||
def remove(self, s):
|
||||
if self.result.endswith(s):
|
||||
self.result = self.result[:-len(s)]
|
||||
|
||||
def comma_separated_list(self, items):
|
||||
if len(items) > 0:
|
||||
for item in items[:-1]:
|
||||
self.visit(item)
|
||||
self.put(u", ")
|
||||
self.visit(items[-1])
|
||||
|
||||
def visit_Node(self, node):
|
||||
raise AssertionError("Node not handled by serializer: %r" % node)
|
||||
|
||||
def visit_NameNode(self, node):
|
||||
self.put(node.name)
|
||||
|
||||
def visit_NoneNode(self, node):
|
||||
self.put(u"None")
|
||||
|
||||
def visit_EllipsisNode(self, node):
|
||||
self.put(u"...")
|
||||
|
||||
def visit_BoolNode(self, node):
|
||||
self.put(str(node.value))
|
||||
|
||||
def visit_ConstNode(self, node):
|
||||
self.put(str(node.value))
|
||||
|
||||
def visit_ImagNode(self, node):
|
||||
self.put(node.value)
|
||||
self.put(u"j")
|
||||
|
||||
def emit_string(self, node, prefix=u""):
|
||||
repr_val = repr(node.value)
|
||||
if repr_val[0] in 'ub':
|
||||
repr_val = repr_val[1:]
|
||||
self.put(u"%s%s" % (prefix, repr_val))
|
||||
|
||||
def visit_BytesNode(self, node):
|
||||
self.emit_string(node, u"b")
|
||||
|
||||
def visit_StringNode(self, node):
|
||||
self.emit_string(node)
|
||||
|
||||
def visit_UnicodeNode(self, node):
|
||||
self.emit_string(node, u"u")
|
||||
|
||||
def emit_sequence(self, node, parens=(u"", u"")):
|
||||
open_paren, close_paren = parens
|
||||
items = node.subexpr_nodes()
|
||||
self.put(open_paren)
|
||||
self.comma_separated_list(items)
|
||||
self.put(close_paren)
|
||||
|
||||
def visit_ListNode(self, node):
|
||||
self.emit_sequence(node, u"[]")
|
||||
|
||||
def visit_TupleNode(self, node):
|
||||
self.emit_sequence(node, u"()")
|
||||
|
||||
def visit_SetNode(self, node):
|
||||
if len(node.subexpr_nodes()) > 0:
|
||||
self.emit_sequence(node, u"{}")
|
||||
else:
|
||||
self.put(u"set()")
|
||||
|
||||
def visit_DictNode(self, node):
|
||||
self.emit_sequence(node, u"{}")
|
||||
|
||||
def visit_DictItemNode(self, node):
|
||||
self.visit(node.key)
|
||||
self.put(u": ")
|
||||
self.visit(node.value)
|
||||
|
||||
unop_precedence = {
|
||||
'not': 3, '!': 3,
|
||||
'+': 11, '-': 11, '~': 11,
|
||||
}
|
||||
binop_precedence = {
|
||||
'or': 1,
|
||||
'and': 2,
|
||||
# unary: 'not': 3, '!': 3,
|
||||
'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
|
||||
'|': 5,
|
||||
'^': 6,
|
||||
'&': 7,
|
||||
'<<': 8, '>>': 8,
|
||||
'+': 9, '-': 9,
|
||||
'*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
|
||||
# unary: '+': 11, '-': 11, '~': 11
|
||||
'**': 12,
|
||||
}
|
||||
|
||||
def operator_enter(self, new_prec):
|
||||
old_prec = self.precedence[-1]
|
||||
if old_prec > new_prec:
|
||||
self.put(u"(")
|
||||
self.precedence.append(new_prec)
|
||||
|
||||
def operator_exit(self):
|
||||
old_prec, new_prec = self.precedence[-2:]
|
||||
if old_prec > new_prec:
|
||||
self.put(u")")
|
||||
self.precedence.pop()
|
||||
|
||||
def visit_NotNode(self, node):
|
||||
op = 'not'
|
||||
prec = self.unop_precedence[op]
|
||||
self.operator_enter(prec)
|
||||
self.put(u"not ")
|
||||
self.visit(node.operand)
|
||||
self.operator_exit()
|
||||
|
||||
def visit_UnopNode(self, node):
|
||||
op = node.operator
|
||||
prec = self.unop_precedence[op]
|
||||
self.operator_enter(prec)
|
||||
self.put(u"%s" % node.operator)
|
||||
self.visit(node.operand)
|
||||
self.operator_exit()
|
||||
|
||||
def visit_BinopNode(self, node):
|
||||
op = node.operator
|
||||
prec = self.binop_precedence.get(op, 0)
|
||||
self.operator_enter(prec)
|
||||
self.visit(node.operand1)
|
||||
self.put(u" %s " % op.replace('_', ' '))
|
||||
self.visit(node.operand2)
|
||||
self.operator_exit()
|
||||
|
||||
def visit_BoolBinopNode(self, node):
|
||||
self.visit_BinopNode(node)
|
||||
|
||||
def visit_PrimaryCmpNode(self, node):
|
||||
self.visit_BinopNode(node)
|
||||
|
||||
def visit_IndexNode(self, node):
|
||||
self.visit(node.base)
|
||||
self.put(u"[")
|
||||
if isinstance(node.index, TupleNode):
|
||||
self.emit_sequence(node.index)
|
||||
else:
|
||||
self.visit(node.index)
|
||||
self.put(u"]")
|
||||
|
||||
def visit_SliceIndexNode(self, node):
|
||||
self.visit(node.base)
|
||||
self.put(u"[")
|
||||
if node.start:
|
||||
self.visit(node.start)
|
||||
self.put(u":")
|
||||
if node.stop:
|
||||
self.visit(node.stop)
|
||||
if node.slice:
|
||||
self.put(u":")
|
||||
self.visit(node.slice)
|
||||
self.put(u"]")
|
||||
|
||||
def visit_SliceNode(self, node):
|
||||
if not node.start.is_none:
|
||||
self.visit(node.start)
|
||||
self.put(u":")
|
||||
if not node.stop.is_none:
|
||||
self.visit(node.stop)
|
||||
if not node.step.is_none:
|
||||
self.put(u":")
|
||||
self.visit(node.step)
|
||||
|
||||
def visit_CondExprNode(self, node):
|
||||
self.visit(node.true_val)
|
||||
self.put(u" if ")
|
||||
self.visit(node.test)
|
||||
self.put(u" else ")
|
||||
self.visit(node.false_val)
|
||||
|
||||
def visit_AttributeNode(self, node):
|
||||
self.visit(node.obj)
|
||||
self.put(u".%s" % node.attribute)
|
||||
|
||||
def visit_SimpleCallNode(self, node):
|
||||
self.visit(node.function)
|
||||
self.put(u"(")
|
||||
self.comma_separated_list(node.args)
|
||||
self.put(")")
|
||||
|
||||
def emit_pos_args(self, node):
|
||||
if node is None:
|
||||
return
|
||||
if isinstance(node, AddNode):
|
||||
self.emit_pos_args(node.operand1)
|
||||
self.emit_pos_args(node.operand2)
|
||||
elif isinstance(node, TupleNode):
|
||||
for expr in node.subexpr_nodes():
|
||||
self.visit(expr)
|
||||
self.put(u", ")
|
||||
elif isinstance(node, AsTupleNode):
|
||||
self.put("*")
|
||||
self.visit(node.arg)
|
||||
self.put(u", ")
|
||||
else:
|
||||
self.visit(node)
|
||||
self.put(u", ")
|
||||
|
||||
def emit_kwd_args(self, node):
|
||||
if node is None:
|
||||
return
|
||||
if isinstance(node, MergedDictNode):
|
||||
for expr in node.subexpr_nodes():
|
||||
self.emit_kwd_args(expr)
|
||||
elif isinstance(node, DictNode):
|
||||
for expr in node.subexpr_nodes():
|
||||
self.put(u"%s=" % expr.key.value)
|
||||
self.visit(expr.value)
|
||||
self.put(u", ")
|
||||
else:
|
||||
self.put(u"**")
|
||||
self.visit(node)
|
||||
self.put(u", ")
|
||||
|
||||
def visit_GeneralCallNode(self, node):
|
||||
self.visit(node.function)
|
||||
self.put(u"(")
|
||||
self.emit_pos_args(node.positional_args)
|
||||
self.emit_kwd_args(node.keyword_args)
|
||||
self.remove(u", ")
|
||||
self.put(")")
|
||||
|
||||
def emit_comprehension(self, body, target,
|
||||
sequence, condition,
|
||||
parens=(u"", u"")):
|
||||
open_paren, close_paren = parens
|
||||
self.put(open_paren)
|
||||
self.visit(body)
|
||||
self.put(u" for ")
|
||||
self.visit(target)
|
||||
self.put(u" in ")
|
||||
self.visit(sequence)
|
||||
if condition:
|
||||
self.put(u" if ")
|
||||
self.visit(condition)
|
||||
self.put(close_paren)
|
||||
|
||||
def visit_ComprehensionAppendNode(self, node):
    # The appended value is simply the comprehension body expression.
    self.visit(node.expr)
|
||||
|
||||
def visit_DictComprehensionAppendNode(self, node):
    """Write a ``key: value`` pair inside a dict comprehension."""
    self.visit(node.key_expr)
    self.put(u": ")
    self.visit(node.value_expr)
|
||||
|
||||
def visit_ComprehensionNode(self, node):
    """Write a list/set/dict comprehension with its bracket pair."""
    bracket_by_type = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
    brackets = bracket_by_type[node.type.py_type_name()]
    loop = node.loop
    body = loop.body
    condition = None
    if hasattr(body, 'if_clauses'):
        # type(body) is Nodes.IfStatNode: pull out the filter condition.
        condition = body.if_clauses[0].condition
        body = body.if_clauses[0].body
    self.emit_comprehension(body, loop.target, loop.iterator.sequence,
                            condition, brackets)
|
||||
|
||||
def visit_GeneratorExpressionNode(self, node):
    """Write a generator expression, always parenthesised."""
    loop = node.loop
    body = loop.body
    condition = None
    if hasattr(body, 'if_clauses'):
        # type(body) is Nodes.IfStatNode: conditional yield.
        condition = body.if_clauses[0].condition
        body = body.if_clauses[0].body.expr.arg
    elif hasattr(body, 'expr'):
        # type(body) is Nodes.ExprStatNode: unconditional yield.
        body = body.expr.arg
    self.emit_comprehension(body, loop.target, loop.iterator.sequence,
                            condition, u"()")
|
@ -0,0 +1,99 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Visitor import ScopeTrackingTransform
|
||||
from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode
|
||||
from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode
|
||||
from .PyrexTypes import py_object_type
|
||||
from .StringEncoding import EncodedString
|
||||
from . import Symtab
|
||||
|
||||
class AutoTestDictTransform(ScopeTrackingTransform):
    """Handle the 'autotestdict' directive.

    Collects docstrings that contain doctests (or all docstrings,
    depending on the 'autotestdict.all'/'autotestdict.cdef' directives)
    into a module-level ``__test__`` dict so the doctest module can
    discover them in the compiled extension module.
    """

    # Special methods whose docstrings must never be collected: they are
    # not reachable through ordinary attribute lookup on instances.
    blacklist = ['__cinit__', '__dealloc__', '__richcmp__',
                 '__nonzero__', '__bool__',
                 '__len__', '__contains__']

    def visit_ModuleNode(self, node):
        if node.is_pxd:
            return node
        self.scope_type = 'module'
        self.scope_node = node

        if not self.current_directives['autotestdict']:
            return node
        self.all_docstrings = self.current_directives['autotestdict.all']
        self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef']

        assert isinstance(node.body, StatListNode)

        # First see if __test__ is already created
        if u'__test__' in node.scope.entries:
            # Do nothing
            return node

        pos = node.pos

        self.tests = []
        self.testspos = node.pos

        test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'),
                                                 py_object_type,
                                                 pos,
                                                 visibility='public')
        # The assignment is appended AFTER visiting the children, so
        # self.tests is fully populated by the time the dict is built.
        create_test_dict_assignment = SingleAssignmentNode(
            pos,
            lhs=NameNode(pos, name=EncodedString(u'__test__'),
                         entry=test_dict_entry),
            rhs=DictNode(pos, key_value_pairs=self.tests))
        self.visitchildren(node)
        node.body.stats.append(create_test_dict_assignment)
        return node

    def add_test(self, testpos, path, doctest):
        """Record one docstring under the key '<path> (line <n>)'."""
        # All generated nodes share the position of the __test__ dict.
        pos = self.testspos
        keystr = u'%s (line %d)' % (path, testpos[1])
        key = UnicodeNode(pos, value=EncodedString(keystr))
        value = UnicodeNode(pos, value=doctest)
        self.tests.append(DictItemNode(pos, key=key, value=value))

    def visit_ExprNode(self, node):
        # expressions cannot contain functions and lambda expressions
        # do not have a docstring
        return node

    def visit_FuncDefNode(self, node):
        if not node.doc or (isinstance(node, DefNode) and node.fused_py_func):
            return node
        if not self.cdef_docstrings:
            # Skip pure cdef functions unless cdef docstrings were requested.
            if isinstance(node, CFuncDefNode) and not node.py_func:
                return node
        if not self.all_docstrings and '>>>' not in node.doc:
            # Only docstrings that actually look like doctests are collected.
            return node

        # NOTE: removed an unused 'pos = self.testspos' local here.
        if self.scope_type == 'module':
            path = node.entry.name
        elif self.scope_type in ('pyclass', 'cclass'):
            if isinstance(node, CFuncDefNode):
                if node.py_func is not None:
                    name = node.py_func.name
                else:
                    name = node.entry.name
            else:
                name = node.name
            if self.scope_type == 'cclass' and name in self.blacklist:
                return node
            if self.scope_type == 'pyclass':
                class_name = self.scope_node.name
            else:
                class_name = self.scope_node.class_name
            if isinstance(node.entry.scope, Symtab.PropertyScope):
                # Property accessors get a 'Class.property.method' path.
                # (removed an unused 'property_method_name' local here)
                path = "%s.%s.%s" % (class_name, node.entry.scope.name,
                                     node.entry.name)
            else:
                path = "%s.%s" % (class_name, node.entry.name)
        else:
            assert False
        self.add_test(node.pos, path, node.doc)
        return node
|
@ -0,0 +1,317 @@
|
||||
# Note: Work in progress
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import codecs
|
||||
import textwrap
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from collections import defaultdict
|
||||
from xml.sax.saxutils import escape as html_escape
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO # does not support writing 'str' in Py2
|
||||
|
||||
from . import Version
|
||||
from .Code import CCodeWriter
|
||||
from .. import Utils
|
||||
|
||||
|
||||
class AnnotationCCodeWriter(CCodeWriter):
    """CCodeWriter that additionally records, per original source line, the
    C code generated for it, so an annotated HTML report can be written
    next to the generated .c file.
    """

    def __init__(self, create_from=None, buffer=None, copy_formatting=True):
        CCodeWriter.__init__(self, create_from, buffer, copy_formatting=copy_formatting)
        if create_from is None:
            # Fresh writer: start a new annotation database.
            self.annotation_buffer = StringIO()
            self.last_annotated_pos = None
            # annotations[filename][line] -> [(column, AnnotationItem)*]
            self.annotations = defaultdict(partial(defaultdict, list))
            # code[filename][line] -> str
            self.code = defaultdict(partial(defaultdict, str))
            # scopes[filename][line] -> set(scopes)
            self.scopes = defaultdict(partial(defaultdict, set))
        else:
            # When creating an insertion point, keep references to the same database
            self.annotation_buffer = create_from.annotation_buffer
            self.annotations = create_from.annotations
            self.code = create_from.code
            self.scopes = create_from.scopes
            self.last_annotated_pos = create_from.last_annotated_pos

    def create_new(self, create_from, buffer, copy_formatting):
        # Factory hook used by CCodeWriter when creating insertion points.
        return AnnotationCCodeWriter(create_from, buffer, copy_formatting)

    def write(self, s):
        # Mirror everything written into the annotation buffer as well.
        CCodeWriter.write(self, s)
        self.annotation_buffer.write(s)

    def mark_pos(self, pos, trace=True):
        """Attribute the C code buffered so far to the previously marked
        source position, then start collecting for *pos*."""
        if pos is not None:
            CCodeWriter.mark_pos(self, pos, trace)
            if self.funcstate and self.funcstate.scope:
                # lambdas and genexprs can result in multiple scopes per line => keep them in a set
                self.scopes[pos[0].filename][pos[1]].add(self.funcstate.scope)
        if self.last_annotated_pos:
            source_desc, line, _ = self.last_annotated_pos
            pos_code = self.code[source_desc.filename]
            pos_code[line] += self.annotation_buffer.getvalue()
        self.annotation_buffer = StringIO()
        self.last_annotated_pos = pos

    def annotate(self, pos, item):
        # Record an AnnotationItem at (filename, line) keyed by column.
        self.annotations[pos[0].filename][pos[1]].append((pos[2], item))

    def _css(self):
        """css template will later allow to choose a colormap"""
        css = [self._css_template]
        # One background shade per score value; higher score -> darker yellow.
        for i in range(255):
            color = u"FFFF%02x" % int(255/(1+i/10.0))
            css.append('.cython.score-%d {background-color: #%s;}' % (i, color))
        try:
            from pygments.formatters import HtmlFormatter
        except ImportError:
            # Pygments is optional; without it, no syntax-highlight styles.
            pass
        else:
            css.append(HtmlFormatter().get_style_defs('.cython'))
        return '\n'.join(css)

    # Static part of the stylesheet (dedent() strips the literal's indent).
    _css_template = textwrap.dedent("""
        body.cython { font-family: courier; font-size: 12; }

        .cython.tag { }
        .cython.line { margin: 0em }
        .cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; }

        .cython.line .run { background-color: #B0FFB0; }
        .cython.line .mis { background-color: #FFB0B0; }
        .cython.code.run { border-left: 8px solid #B0FFB0; }
        .cython.code.mis { border-left: 8px solid #FFB0B0; }

        .cython.code .py_c_api { color: red; }
        .cython.code .py_macro_api { color: #FF7000; }
        .cython.code .pyx_c_api { color: #FF3000; }
        .cython.code .pyx_macro_api { color: #FF7000; }
        .cython.code .refnanny { color: #FFA000; }
        .cython.code .trace { color: #FFA000; }
        .cython.code .error_goto { color: #FFA000; }

        .cython.code .coerce { color: #008000; border: 1px dotted #008000 }
        .cython.code .py_attr { color: #FF0000; font-weight: bold; }
        .cython.code .c_attr { color: #0000FF; }
        .cython.code .py_call { color: #FF0000; font-weight: bold; }
        .cython.code .c_call { color: #0000FF; }
    """)

    # on-click toggle function to show/hide C source code
    _onclick_attr = ' onclick="{0}"'.format((
        "(function(s){"
        " s.display = s.display === 'block' ? 'none' : 'block'"
        "})(this.nextElementSibling.style)"
    ).replace(' ', '')  # poor dev's JS minification
    )

    def save_annotation(self, source_filename, target_filename, coverage_xml=None):
        """Write the annotated HTML report next to *target_filename*."""
        with Utils.open_source_file(source_filename) as f:
            code = f.read()
        generated_code = self.code.get(source_filename, {})
        c_file = Utils.decode_filename(os.path.basename(target_filename))
        html_filename = os.path.splitext(target_filename)[0] + ".html"

        with codecs.open(html_filename, "w", encoding="UTF-8") as out_buffer:
            out_buffer.write(self._save_annotation(code, generated_code, c_file, source_filename, coverage_xml))

    def _save_annotation_header(self, c_file, source_filename, coverage_timestamp=None):
        """Return the list of HTML fragments that open the report."""
        coverage_info = ''
        if coverage_timestamp:
            # Coverage timestamps are in milliseconds since the epoch.
            coverage_info = u' with coverage data from {timestamp}'.format(
                timestamp=datetime.fromtimestamp(int(coverage_timestamp) // 1000))

        # NOTE(review): the 'filename' format argument below is not
        # referenced by the template (the <title> is hard-coded) — verify
        # against upstream whether the title should interpolate it.
        outlist = [
            textwrap.dedent(u'''\
            <!DOCTYPE html>
            <!-- Generated by Cython {watermark} -->
            <html>
            <head>
            <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
            <title>Cython: (unknown)</title>
            <style type="text/css">
            {css}
            </style>
            </head>
            <body class="cython">
            <p><span style="border-bottom: solid 1px grey;">Generated by Cython {watermark}</span>{more_info}</p>
            <p>
            <span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
            Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
            </p>
            ''').format(css=self._css(), watermark=Version.watermark,
                        filename=os.path.basename(source_filename) if source_filename else '',
                        more_info=coverage_info)
        ]
        if c_file:
            outlist.append(u'<p>Raw output: <a href="%s">%s</a></p>\n' % (c_file, c_file))
        return outlist

    def _save_annotation_footer(self):
        return (u'</body></html>\n',)

    def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None):
        """
        lines : original cython source code split by lines
        generated_code : generated c code keyed by line number in original file
        target filename : name of the file in which to store the generated html
        c_file : filename in which the c_code has been written
        """
        if coverage_xml is not None and source_filename:
            coverage_timestamp = coverage_xml.get('timestamp', '').strip()
            covered_lines = self._get_line_coverage(coverage_xml, source_filename)
        else:
            coverage_timestamp = covered_lines = None
        # Plain dicts: KeyError (not auto-creation) for unannotated lines.
        annotation_items = dict(self.annotations[source_filename])
        scopes = dict(self.scopes[source_filename])

        outlist = []
        outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp))
        outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines))
        outlist.extend(self._save_annotation_footer())
        return ''.join(outlist)

    def _get_line_coverage(self, coverage_xml, source_filename):
        """Return {line_number: hit_count} for *source_filename*, or None."""
        coverage_data = None
        for entry in coverage_xml.iterfind('.//class'):
            if not entry.get('filename'):
                continue
            if (entry.get('filename') == source_filename or
                    os.path.abspath(entry.get('filename')) == source_filename):
                # Exact match wins immediately.
                coverage_data = entry
                break
            elif source_filename.endswith(entry.get('filename')):
                coverage_data = entry  # but we might still find a better match...
        if coverage_data is None:
            return None
        return dict(
            (int(line.get('number')), int(line.get('hits')))
            for line in coverage_data.iterfind('lines/line')
        )

    def _htmlify_code(self, code):
        """Syntax-highlight *code* with Pygments if available, else escape it."""
        try:
            from pygments import highlight
            from pygments.lexers import CythonLexer
            from pygments.formatters import HtmlFormatter
        except ImportError:
            # no Pygments, just escape the code
            return html_escape(code)

        html_code = highlight(
            code, CythonLexer(stripnl=False, stripall=False),
            HtmlFormatter(nowrap=True))
        return html_code

    def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None):
        """Render one <pre> per source line, with the generated C code in a
        hidden, click-to-toggle <pre> right below it."""
        outlist = [u'<div class="cython">']
        pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
        # Bound .copy() of a zeroed dict: a cheap fresh counter per line.
        new_calls_map = dict(
            (name, 0) for name in
            'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split()
        ).copy

        self.mark_pos(None)  # flush any still-buffered C code into self.code

        def annotate(match):
            # re.sub callback: count the API call and wrap it in a span.
            group_name = match.lastgroup
            calls[group_name] += 1
            return u"<span class='%s'>%s</span>" % (
                group_name, match.group(group_name))

        lines = self._htmlify_code(cython_code).splitlines()
        lineno_width = len(str(len(lines)))
        if not covered_lines:
            covered_lines = None

        for k, line in enumerate(lines, 1):
            try:
                c_code = generated_code[k]
            except KeyError:
                c_code = ''
            else:
                # Collapse position-marker comments into a short ellipsis.
                c_code = _replace_pos_comment(pos_comment_marker, c_code)
                if c_code.startswith(pos_comment_marker):
                    c_code = c_code[len(pos_comment_marker):]
                c_code = html_escape(c_code)

            calls = new_calls_map()
            c_code = _parse_code(annotate, c_code)
            # Heuristic "Python interaction" score: C-API calls weigh most.
            score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
                     calls['py_macro_api'] + calls['pyx_macro_api'])

            if c_code:
                onclick = self._onclick_attr
                expandsymbol = '+'
            else:
                onclick = ''
                expandsymbol = ' '

            covered = ''
            if covered_lines is not None and k in covered_lines:
                hits = covered_lines[k]
                if hits is not None:
                    covered = 'run' if hits else 'mis'

            outlist.append(
                u'<pre class="cython line score-{score}"{onclick}>'
                # generate line number with expand symbol in front,
                # and the right number of digit
                u'{expandsymbol}<span class="{covered}">{line:0{lineno_width}d}</span>: {code}</pre>\n'.format(
                    score=score,
                    expandsymbol=expandsymbol,
                    covered=covered,
                    lineno_width=lineno_width,
                    line=k,
                    code=line.rstrip(),
                    onclick=onclick,
                ))
            if c_code:
                outlist.append(u"<pre class='cython code score-{score} {covered}'>{code}</pre>".format(
                    score=score, covered=covered, code=c_code))
        outlist.append(u"</div>")
        return outlist
|
||||
|
||||
|
||||
# Classifies Cython/CPython API calls in generated C code.  The fragments
# are bytes literals decoded to str before compiling.  All call groups
# share a look-ahead for '(' so the parenthesis itself is not replaced;
# the refnanny/trace/error_goto groups match without it.
_api_call_pattern = (
    br'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
    br'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
    br'(?:'
    br'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
    br'(?P<pyx_c_api>(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|'
    br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
    br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
    br')(?=\()|'  # look-ahead to exclude subsequent '(' from replacement
    br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
).decode('ascii')

_parse_code = re.compile(_api_call_pattern).sub
|
||||
|
||||
|
||||
# this matches what Cython generates as code line marker comment:
# a (possibly multi-line) C comment on its own line(s).
_pos_comment_pattern = br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii')

_replace_pos_comment = re.compile(_pos_comment_pattern, re.M).sub
|
||||
|
||||
|
||||
class AnnotationItem(object):
    """One annotation span, rendered as an HTML <span> wrapper."""

    def __init__(self, style, text, tag="", size=0):
        self.style = style  # CSS class suffix
        self.text = text    # tooltip text (title attribute)
        self.tag = tag      # visible text placed inside the span
        self.size = size    # length of the annotated region

    def start(self):
        """Return the opening markup for this annotation."""
        return u"<span class='cython tag %s' title='%s'>%s" % (
            self.style, self.text, self.tag)

    def end(self):
        """Return (size, closing markup) for this annotation."""
        return self.size, u"</span>"
|
@ -0,0 +1,214 @@
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .Visitor import CythonTransform
|
||||
from .StringEncoding import EncodedString
|
||||
from . import Options
|
||||
from . import PyrexTypes, ExprNodes
|
||||
from ..CodeWriter import ExpressionWriter
|
||||
|
||||
|
||||
class AnnotationWriter(ExpressionWriter):
    """Expression formatter used when rendering signature annotations.

    Falls back to a placeholder for any node it cannot render.
    """

    def visit_Node(self, node):
        # Unknown node kind: emit a placeholder instead of failing.
        self.put(u"<???>")

    def visit_LambdaNode(self, node):
        # XXX Should we do better?
        self.put("<lambda>")
|
||||
|
||||
|
||||
class EmbedSignature(CythonTransform):
    """Implements the 'embedsignature' directive: prepends a textual
    rendering of each function's signature to its docstring, so that
    introspection tools can display it for compiled code."""

    def __init__(self, context):
        super(EmbedSignature, self).__init__(context)
        # Name/node of the class currently being visited (None at module level).
        self.class_name = None
        self.class_node = None

    def _fmt_expr(self, node):
        # Render an expression node (default value, annotation) as text.
        writer = AnnotationWriter()
        result = writer.write(node)
        # print(type(node).__name__, '-->', result)
        return result

    def _fmt_arg(self, arg):
        """Format one argument as ``name[: type][ = default]``."""
        if arg.type is PyrexTypes.py_object_type or arg.is_self_arg:
            # Plain Python object (or self): just the name, no type.
            doc = arg.name
        else:
            doc = arg.type.declaration_code(arg.name, for_display=1)

        if arg.annotation:
            annotation = self._fmt_expr(arg.annotation)
            doc = doc + (': %s' % annotation)
            if arg.default:
                # PEP 8 spacing around '=' when an annotation is present.
                default = self._fmt_expr(arg.default)
                doc = doc + (' = %s' % default)
        elif arg.default:
            default = self._fmt_expr(arg.default)
            doc = doc + ('=%s' % default)
        return doc

    def _fmt_star_arg(self, arg):
        """Format a *args / **kwargs argument name (with its annotation)."""
        arg_doc = arg.name
        if arg.annotation:
            annotation = self._fmt_expr(arg.annotation)
            arg_doc = arg_doc + (': %s' % annotation)
        return arg_doc

    def _fmt_arglist(self, args,
                     npargs=0, pargs=None,
                     nkargs=0, kargs=None,
                     hide_self=False):
        """Format the whole argument list, inserting '*args' (or a bare
        '*' before keyword-only args) and appending '**kwargs'."""
        arglist = []
        for arg in args:
            if not hide_self or not arg.entry.is_self_arg:
                arg_doc = self._fmt_arg(arg)
                arglist.append(arg_doc)
        if pargs:
            arg_doc = self._fmt_star_arg(pargs)
            arglist.insert(npargs, '*%s' % arg_doc)
        elif nkargs:
            # Keyword-only arguments but no *args: mark with a bare '*'.
            arglist.insert(npargs, '*')
        if kargs:
            arg_doc = self._fmt_star_arg(kargs)
            arglist.append('**%s' % arg_doc)
        return arglist

    def _fmt_ret_type(self, ret):
        if ret is PyrexTypes.py_object_type:
            # A plain 'object' return adds no information; omit it.
            return None
        else:
            return ret.declaration_code("", for_display=1)

    def _fmt_signature(self, cls_name, func_name, args,
                       npargs=0, pargs=None,
                       nkargs=0, kargs=None,
                       return_expr=None,
                       return_type=None, hide_self=False):
        """Build ``Class.func(args) -> ret`` text for the docstring."""
        arglist = self._fmt_arglist(args,
                                    npargs, pargs,
                                    nkargs, kargs,
                                    hide_self=hide_self)
        arglist_doc = ', '.join(arglist)
        func_doc = '%s(%s)' % (func_name, arglist_doc)
        if cls_name:
            func_doc = '%s.%s' % (cls_name, func_doc)
        ret_doc = None
        if return_expr:
            # Explicit return annotation takes precedence over the C type.
            ret_doc = self._fmt_expr(return_expr)
        elif return_type:
            ret_doc = self._fmt_ret_type(return_type)
        if ret_doc:
            func_doc = '%s -> %s' % (func_doc, ret_doc)
        return func_doc

    def _embed_signature(self, signature, node_doc):
        # Prepend the signature line to any existing docstring.
        if node_doc:
            return "%s\n%s" % (signature, node_doc)
        else:
            return signature

    def __call__(self, node):
        if not Options.docstrings:
            # Docstrings are being stripped; nothing to embed into.
            return node
        else:
            return super(EmbedSignature, self).__call__(node)

    def visit_ClassDefNode(self, node):
        # Track the enclosing class name/node while visiting its members.
        oldname = self.class_name
        oldclass = self.class_node
        self.class_node = node
        try:
            # PyClassDefNode
            self.class_name = node.name
        except AttributeError:
            # CClassDefNode
            self.class_name = node.class_name
        self.visitchildren(node)
        self.class_name = oldname
        self.class_node = oldclass
        return node

    def visit_LambdaNode(self, node):
        # lambda expressions do not have a signature or inner functions
        return node

    def visit_DefNode(self, node):
        if not self.current_directives['embedsignature']:
            return node

        is_constructor = False
        hide_self = False
        if node.entry.is_special:
            # Among special methods, only __init__ gets a signature
            # (presented as the class signature, with 'self' hidden).
            is_constructor = self.class_node and node.name == '__init__'
            if not is_constructor:
                return node
            class_name, func_name = None, self.class_name
            hide_self = True
        else:
            class_name, func_name = self.class_name, node.name

        nkargs = getattr(node, 'num_kwonly_args', 0)
        npargs = len(node.args) - nkargs
        signature = self._fmt_signature(
            class_name, func_name, node.args,
            npargs, node.star_arg,
            nkargs, node.starstar_arg,
            return_expr=node.return_type_annotation,
            return_type=None, hide_self=hide_self)
        if signature:
            if is_constructor:
                # Attach the constructor signature to the class docstring.
                doc_holder = self.class_node.entry.type.scope
            else:
                doc_holder = node.entry

            if doc_holder.doc is not None:
                old_doc = doc_holder.doc
            elif not is_constructor and getattr(node, 'py_func', None) is not None:
                old_doc = node.py_func.entry.doc
            else:
                old_doc = None
            new_doc = self._embed_signature(signature, old_doc)
            doc_holder.doc = EncodedString(new_doc)
            if not is_constructor and getattr(node, 'py_func', None) is not None:
                # Keep the Python wrapper's docstring in sync.
                node.py_func.entry.doc = EncodedString(new_doc)
        return node

    def visit_CFuncDefNode(self, node):
        if not self.current_directives['embedsignature']:
            return node
        if not node.overridable:  # not cpdef FOO(...):
            return node

        signature = self._fmt_signature(
            self.class_name, node.declarator.base.name,
            node.declarator.args,
            return_type=node.return_type)
        if signature:
            if node.entry.doc is not None:
                old_doc = node.entry.doc
            elif getattr(node, 'py_func', None) is not None:
                old_doc = node.py_func.entry.doc
            else:
                old_doc = None
            new_doc = self._embed_signature(signature, old_doc)
            node.entry.doc = EncodedString(new_doc)
            if hasattr(node, 'py_func') and node.py_func is not None:
                # Keep the Python wrapper's docstring in sync.
                node.py_func.entry.doc = EncodedString(new_doc)
        return node

    def visit_PropertyNode(self, node):
        if not self.current_directives['embedsignature']:
            return node

        entry = node.entry
        if entry.visibility == 'public':
            # property synthesised from a cdef public attribute
            type_name = entry.type.declaration_code("", for_display=1)
            if not entry.type.is_pyobject:
                # Non-Python types are quoted in the signature text.
                type_name = "'%s'" % type_name
            elif entry.type.is_extension_type:
                type_name = entry.type.module_name + '.' + type_name
            signature = '%s: %s' % (entry.name, type_name)
            new_doc = self._embed_signature(signature, entry.doc)
            entry.doc = EncodedString(new_doc)
        return node
|
740
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Buffer.py
Normal file
740
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Buffer.py
Normal file
@ -0,0 +1,740 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Visitor import CythonTransform
|
||||
from .ModuleNode import ModuleNode
|
||||
from .Errors import CompileError
|
||||
from .UtilityCode import CythonUtilityCode
|
||||
from .Code import UtilityCode, TempitaUtilityCode
|
||||
|
||||
from . import Options
|
||||
from . import Interpreter
|
||||
from . import PyrexTypes
|
||||
from . import Naming
|
||||
from . import Symtab
|
||||
|
||||
def dedent(text, reindent=0):
    """Strip common leading whitespace from *text*; optionally re-indent
    every line by *reindent* spaces afterwards."""
    import textwrap
    stripped = textwrap.dedent(text)
    if reindent <= 0:
        return stripped
    prefix = " " * reindent
    return '\n'.join(prefix + line for line in stripped.split('\n'))
|
||||
|
||||
class IntroduceBufferAuxiliaryVars(CythonTransform):
    """Declares the auxiliary helper variables (buffer struct + per-dim
    info) needed for every buffer-typed variable in each visited scope."""

    #
    # Entry point
    #

    buffers_exists = False    # did any scope contain buffer/memoryview vars?
    using_memoryview = False  # is the memoryview utility type in use?

    def __call__(self, node):
        assert isinstance(node, ModuleNode)
        self.max_ndim = 0  # largest ndim seen across all buffers
        result = super(IntroduceBufferAuxiliaryVars, self).__call__(node)
        if self.buffers_exists:
            # Emit the supporting C declarations only when actually needed.
            use_bufstruct_declare_code(node.scope)
            use_py2_buffer_functions(node.scope)

        return result


    #
    # Basic operations for transforms
    #
    def handle_scope(self, node, scope):
        # For all buffers, insert extra variables in the scope.
        # The variables are also accessible from the buffer_info
        # on the buffer entry
        scope_items = scope.entries.items()
        bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
        if len(bufvars) > 0:
            # Sort for deterministic output ordering.
            bufvars.sort(key=lambda entry: entry.name)
            self.buffers_exists = True

        memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
        if len(memviewslicevars) > 0:
            self.buffers_exists = True


        for (name, entry) in scope_items:
            if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
                self.using_memoryview = True
                break
        del scope_items

        if isinstance(node, ModuleNode) and len(bufvars) > 0:
            # for now...note that pos is wrong
            raise CompileError(node.pos, "Buffer vars not allowed in module scope")
        for entry in bufvars:
            if entry.type.dtype.is_ptr:
                raise CompileError(node.pos, "Buffers with pointer types not yet supported.")

            name = entry.name
            buftype = entry.type
            if buftype.ndim > Options.buffer_max_dims:
                raise CompileError(node.pos,
                                   "Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims)
            if buftype.ndim > self.max_ndim:
                self.max_ndim = buftype.ndim

            # Declare auxiliary vars
            def decvar(type, prefix):
                # Declare one helper variable, cname mangled from prefix+name.
                cname = scope.mangle(prefix, name)
                aux_var = scope.declare_var(name=None, cname=cname,
                                            type=type, pos=node.pos)
                if entry.is_arg:
                    aux_var.used = True  # otherwise, NameNode will mark whether it is used

                return aux_var

            auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix),
                       (PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix))
            pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars]

            entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer)

        scope.buffer_entries = bufvars
        self.scope = scope

    def visit_ModuleNode(self, node):
        self.handle_scope(node, node.scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        self.handle_scope(node, node.local_scope)
        self.visitchildren(node)
        return node
|
||||
|
||||
#
|
||||
# Analysis
|
||||
#
|
||||
# Ordered list of buffer options; the order determines how positional
# options in a buffer type declaration are matched up.
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast")  # ordered!
# Values used when an option is not supplied explicitly.
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
buffer_positional_options_count = 1  # anything beyond this needs keyword argument

# Error message templates used by analyse_buffer_options().
ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
ERR_BUF_TOO_MANY = 'Too many buffer options'
ERR_BUF_DUP = '"%s" buffer option already supplied'
ERR_BUF_MISSING = '"%s" missing'
ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)'
ERR_BUF_NDIM = 'ndim must be a non-negative integer'
ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct'
ERR_BUF_BOOL = '"%s" must be a boolean'
||||
|
||||
def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True):
    """
    Must be called during type analysis, as analyse is called
    on the dtype argument.

    posargs and dictargs should consist of a list and a dict
    of tuples (value, pos). Defaults should be a dict of values.

    Returns a dict containing all the options a buffer can have and
    its value (with the positions stripped).

    Raises CompileError for unknown, duplicate, missing or ill-typed
    options.
    """
    if defaults is None:
        defaults = buffer_defaults

    posargs, dictargs = Interpreter.interpret_compiletime_options(
        posargs, dictargs, type_env=env, type_args=(0, 'dtype'))

    if len(posargs) > buffer_positional_options_count:
        raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)

    options = {}
    for name, (value, pos) in dictargs.items():
        if name not in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        options[name] = value

    # Positional options pair up with the leading names of the ordered
    # buffer_options tuple, so they are always known option names;
    # only duplicates with keyword options need to be rejected here.
    for name, (value, pos) in zip(buffer_options, posargs):
        if name in options:
            raise CompileError(pos, ERR_BUF_DUP % name)
        options[name] = value

    # Check that they are all there and copy defaults
    for name in buffer_options:
        if name not in options:
            try:
                options[name] = defaults[name]
            except KeyError:
                if need_complete:
                    raise CompileError(globalpos, ERR_BUF_MISSING % name)

    dtype = options.get("dtype")
    if dtype and dtype.is_extension_type:
        raise CompileError(globalpos, ERR_BUF_DTYPE)

    ndim = options.get("ndim")
    if ndim and (not isinstance(ndim, int) or ndim < 0):
        raise CompileError(globalpos, ERR_BUF_NDIM)

    mode = options.get("mode")
    if mode and mode not in ('full', 'strided', 'c', 'fortran'):
        raise CompileError(globalpos, ERR_BUF_MODE)

    def assert_bool(name):
        # Validate that a boolean-valued option really holds a bool.
        x = options.get(name)
        if not isinstance(x, bool):
            raise CompileError(globalpos, ERR_BUF_BOOL % name)

    assert_bool('negative_indices')
    assert_bool('cast')

    return options
|
||||
|
||||
|
||||
#
|
||||
# Code generation
|
||||
#
|
||||
|
||||
class BufferEntry(object):
|
||||
def __init__(self, entry):
    # Symtab entry for the buffer variable this helper wraps.
    self.entry = entry
    self.type = entry.type
    # cname of the auxiliary per-dimension info struct variable.
    self.cname = entry.buffer_aux.buflocal_nd_var.cname
    # C expression reaching the acquired buffer's raw data pointer.
    self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname
    self.buf_ptr_type = entry.type.buffer_ptr_type
    self.init_attributes()
|
||||
|
||||
def init_attributes(self):
|
||||
self.shape = self.get_buf_shapevars()
|
||||
self.strides = self.get_buf_stridevars()
|
||||
self.suboffsets = self.get_buf_suboffsetvars()
|
||||
|
||||
def get_buf_suboffsetvars(self):
|
||||
return self._for_all_ndim("%s.diminfo[%d].suboffsets")
|
||||
|
||||
def get_buf_stridevars(self):
|
||||
return self._for_all_ndim("%s.diminfo[%d].strides")
|
||||
|
||||
def get_buf_shapevars(self):
|
||||
return self._for_all_ndim("%s.diminfo[%d].shape")
|
||||
|
||||
def _for_all_ndim(self, s):
|
||||
return [s % (self.cname, i) for i in range(self.type.ndim)]
|
||||
|
||||
def generate_buffer_lookup_code(self, code, index_cnames):
|
||||
# Create buffer lookup and return it
|
||||
# This is done via utility macros/inline functions, which vary
|
||||
# according to the access mode used.
|
||||
params = []
|
||||
nd = self.type.ndim
|
||||
mode = self.type.mode
|
||||
if mode == 'full':
|
||||
for i, s, o in zip(index_cnames,
|
||||
self.get_buf_stridevars(),
|
||||
self.get_buf_suboffsetvars()):
|
||||
params.append(i)
|
||||
params.append(s)
|
||||
params.append(o)
|
||||
funcname = "__Pyx_BufPtrFull%dd" % nd
|
||||
funcgen = buf_lookup_full_code
|
||||
else:
|
||||
if mode == 'strided':
|
||||
funcname = "__Pyx_BufPtrStrided%dd" % nd
|
||||
funcgen = buf_lookup_strided_code
|
||||
elif mode == 'c':
|
||||
funcname = "__Pyx_BufPtrCContig%dd" % nd
|
||||
funcgen = buf_lookup_c_code
|
||||
elif mode == 'fortran':
|
||||
funcname = "__Pyx_BufPtrFortranContig%dd" % nd
|
||||
funcgen = buf_lookup_fortran_code
|
||||
else:
|
||||
assert False
|
||||
for i, s in zip(index_cnames, self.get_buf_stridevars()):
|
||||
params.append(i)
|
||||
params.append(s)
|
||||
|
||||
# Make sure the utility code is available
|
||||
if funcname not in code.globalstate.utility_codes:
|
||||
code.globalstate.utility_codes.add(funcname)
|
||||
protocode = code.globalstate['utility_code_proto']
|
||||
defcode = code.globalstate['utility_code_def']
|
||||
funcgen(protocode, defcode, name=funcname, nd=nd)
|
||||
|
||||
buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code()
|
||||
ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr,
|
||||
", ".join(params))
|
||||
return ptrcode
|
||||
|
||||
|
||||
def get_flags(buffer_aux, buffer_type):
    """Return the C expression string of PyBUF_* flags used to acquire a
    buffer of the given type (always includes PyBUF_FORMAT; adds
    PyBUF_WRITABLE when the buffer is written to)."""
    mode_flag = {
        'full': '| PyBUF_INDIRECT',
        'strided': '| PyBUF_STRIDES',
        'c': '| PyBUF_C_CONTIGUOUS',
        'fortran': '| PyBUF_F_CONTIGUOUS',
    }
    mode = buffer_type.mode
    assert mode in mode_flag
    flags = 'PyBUF_FORMAT' + mode_flag[mode]
    if buffer_aux.writable_needed:
        flags += "| PyBUF_WRITABLE"
    return flags
|
||||
|
||||
def used_buffer_aux_vars(entry):
    """Flag both auxiliary buffer variables of *entry* as used so they get
    declared in the generated code."""
    aux = entry.buffer_aux
    for var in (aux.buflocal_nd_var, aux.rcbuf_var):
        var.used = True
|
||||
|
||||
def put_unpack_buffer_aux_into_scope(buf_entry, code):
    """Emit C code copying the strides/shape (and, for indirect buffers,
    suboffsets) arrays of the acquired Py_buffer into the per-dimension
    diminfo cache of the local buffer struct."""
    struct_cname = buf_entry.buffer_aux.buflocal_nd_var.cname
    fields = ['strides', 'shape']
    if buf_entry.type.mode == 'full':
        # Only indirect ('full') buffers carry suboffsets.
        fields.append('suboffsets')
    stmts = [
        "%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % (
            struct_cname, dim, field, struct_cname, field, dim)
        for dim in range(buf_entry.type.ndim)
        for field in fields
    ]
    # Emit as a single line, matching the original one-putln output.
    code.putln(' '.join(stmts))
|
||||
|
||||
def put_init_vars(entry, code):
    """Emit zero-initialization of *entry*'s buffer bookkeeping structs so
    that a later release is always safe even if no buffer was acquired."""
    aux = entry.buffer_aux
    nd_struct = aux.buflocal_nd_var.cname
    rc_struct = aux.rcbuf_var.cname
    # Refcounted Py_buffer holder: no buffer held, refcount zero.
    code.putln("%s.pybuffer.buf = NULL;" % rc_struct)
    code.putln("%s.refcount = 0;" % rc_struct)
    # N-dimensional local struct: no data yet, wire up the rc struct.
    code.putln("%s.data = NULL;" % nd_struct)
    code.putln("%s.rcbuffer = &%s;" % (nd_struct, rc_struct))
|
||||
|
||||
|
||||
def put_acquire_arg_buffer(entry, code, pos):
    """Emit code acquiring the buffer of a buffer-typed function argument,
    jumping to the error label at *pos* on failure, then unpack its
    dimension info into the local buffer struct."""
    acquire_expr = get_getbuffer_call(code, entry.cname, entry.buffer_aux, entry.type)
    # Open a fresh C scope so the format-parsing stack array does not
    # leak into the rest of the function body.
    code.putln("{")
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];"
               % entry.type.dtype.struct_nesting_depth())
    code.putln(code.error_goto_if("%s == -1" % acquire_expr, pos))
    code.putln("}")
    # An exception raised in arg parsing cannot be caught, so no
    # need to care about the buffer then.
    put_unpack_buffer_aux_into_scope(entry, code)
|
||||
|
||||
|
||||
def put_release_buffer_code(code, entry):
    """Emit a safe release of the buffer held by *entry*'s local buffer
    struct (the helper is a no-op when no buffer is held)."""
    code.globalstate.use_utility_code(acquire_utility_code)
    struct_cname = entry.buffer_aux.buflocal_nd_var.cname
    code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % struct_cname)
|
||||
|
||||
|
||||
def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
    """Return (as a string) a C call expression that acquires and validates
    the buffer of *obj_cname* into the entry's Py_buffer struct.

    The expression evaluates to -1 on failure and expects a local
    ``__pyx_stack`` array to be in scope at the insertion point.
    """
    struct_cname = buffer_aux.buflocal_nd_var.cname
    # Emit the run-time type info for the dtype first (may recurse into
    # struct fields), then make sure the validation helper is available.
    typeinfo_cname = get_type_information_cname(code, buffer_type.dtype)
    code.globalstate.use_utility_code(acquire_utility_code)
    return (
        "__Pyx_GetBufferAndValidate(&%s.rcbuffer->pybuffer, "
        "(PyObject*)%s, &%s, %s, %d, %d, __pyx_stack)" % (
            struct_cname, obj_cname, typeinfo_cname,
            get_flags(buffer_aux, buffer_type),
            buffer_type.ndim, int(buffer_type.cast)))
|
||||
|
||||
|
||||
def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
                         is_initialized, pos, code):
    """
    Generate code for reassigning a buffer variables. This only deals with getting
    the buffer auxiliary structure and variables set up correctly, the assignment
    itself and refcounting is the responsibility of the caller.

    However, the assignment operation may throw an exception so that the reassignment
    never happens.

    Depending on the circumstances there are two possible outcomes:
    - Old buffer released, new acquired, rhs assigned to lhs
    - Old buffer released, new acquired which fails, reaqcuire old lhs buffer
      (which may or may not succeed).
    """

    buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
    # NOTE(review): 'flags' is not referenced below; get_getbuffer_call
    # computes the flags itself — looks like a leftover, confirm before removing.
    flags = get_flags(buffer_aux, buffer_type)

    code.putln("{")  # Set up necessary stack for getbuffer
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())

    # '%s' placeholder: the same acquisition expression is reused for both
    # the rhs (new buffer) and, on fallback, the lhs (old buffer).
    getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below

    if is_initialized:
        # Release any existing buffer
        code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct)
        # Acquire
        retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname))
        code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname)))
        # If acquisition failed, attempt to reacquire the old buffer
        # before raising the exception. A failure of reacquisition
        # will cause the reacquisition exception to be reported, one
        # can consider working around this later.
        exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
                          for _ in range(3))
        code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps)
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
        code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps)  # Do not refnanny these!
        code.globalstate.use_utility_code(raise_buffer_fallback_code)
        code.putln('__Pyx_RaiseBufferFallbackError();')
        code.putln('} else {')
        # Reacquisition succeeded: restore the original exception.
        code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps)
        code.putln('}')
        code.putln('%s = %s = %s = 0;' % exc_temps)
        for t in exc_temps:
            code.funcstate.release_temp(t)
        code.putln('}')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln(code.error_goto_if_neg(retcode_cname, pos))
        code.funcstate.release_temp(retcode_cname)
    else:
        # Our entry had no previous value, so set to None when acquisition fails.
        # In this case, auxiliary vars should be set up right in initialization to a zero-buffer,
        # so it suffices to set the buf field to NULL.
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname)))
        code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' %
                   (lhs_cname,
                    PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"),
                    pybuffernd_struct))
        code.putln(code.error_goto(pos))
        code.put('} else {')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln('}')

    code.putln("}")  # Release stack
|
||||
|
||||
|
||||
def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
                           pos, code, negative_indices, in_nogil_context):
    """
    Generates code to process indices and calculate an offset into
    a buffer. Returns a C string which gives a pointer which can be
    read from or written to at will (it is an expression so caller should
    store it in a temporary if it is used more than once).

    As the bounds checking can have any number of combinations of unsigned
    arguments, smart optimizations etc. we insert it directly in the function
    body. The lookup however is delegated to a inline function that is instantiated
    once per ndim (lookup with suboffsets tend to get quite complicated).

    entry is a BufferEntry
    """
    # Wraparound handling is only emitted when both the directive and the
    # caller allow it.
    negative_indices = directives['wraparound'] and negative_indices

    if directives['boundscheck']:
        # Check bounds and fix negative indices.
        # We allocate a temporary which is initialized to -1, meaning OK (!).
        # If an error occurs, the temp is set to the index dimension the
        # error is occurring at.
        failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = -1;" % failed_dim_temp)
        for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())):
            if signed != 0:
                # not unsigned, deal with negative index
                code.putln("if (%s < 0) {" % cname)
                if negative_indices:
                    # Wrap around once, then flag an error if still negative.
                    code.putln("%s += %s;" % (cname, shape))
                    code.putln("if (%s) %s = %d;" % (
                        code.unlikely("%s < 0" % cname),
                        failed_dim_temp, dim))
                else:
                    # Negative index is an outright error without wraparound.
                    code.putln("%s = %d;" % (failed_dim_temp, dim))
                code.put("} else ")
            # check bounds in positive direction
            if signed != 0:
                cast = ""
            else:
                cast = "(size_t)"
            code.putln("if (%s) %s = %d;" % (
                code.unlikely("%s >= %s%s" % (cname, cast, shape)),
                failed_dim_temp, dim))

        # Pick the error-raising helper appropriate for the GIL state.
        if in_nogil_context:
            code.globalstate.use_utility_code(raise_indexerror_nogil)
            func = '__Pyx_RaiseBufferIndexErrorNogil'
        else:
            code.globalstate.use_utility_code(raise_indexerror_code)
            func = '__Pyx_RaiseBufferIndexError'

        code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp))
        code.putln('%s(%s);' % (func, failed_dim_temp))
        code.putln(code.error_goto(pos))
        code.putln('}')
        code.funcstate.release_temp(failed_dim_temp)
    elif negative_indices:
        # Only fix negative indices.
        for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()):
            if signed != 0:
                code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape))

    return entry.generate_buffer_lookup_code(code, index_cnames)
|
||||
|
||||
|
||||
def use_bufstruct_declare_code(env):
    # Make the buffer helper struct declarations available in this module.
    env.use_utility_code(buffer_struct_declare_code)
|
||||
|
||||
|
||||
def buf_lookup_full_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # _i_ndex, _s_tride, sub_o_ffset
    macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)])
    # The macro only casts; the pointer chasing happens in the inline
    # helper function emitted below.
    proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))

    funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
    proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
    # Per dimension: advance by stride*index, then follow the indirection
    # pointer when the suboffset is non-negative (PEP 3118 semantics).
    defin.putln(dedent("""
        static CYTHON_INLINE void* %s_imp(void* buf, %s) {
          char* ptr = (char*)buf;
        """) % (name, funcargs) + "".join([dedent("""\
            ptr += s%d * i%d;
            if (o%d >= 0) ptr = *((char**)ptr) + o%d;
        """) % (i, i, i, i) for i in range(nd)]
        ) + "\nreturn ptr;\n}")
|
||||
|
||||
|
||||
def buf_lookup_strided_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # Macro parameters come in index/stride pairs: i0, s0, i1, s1, ...
    pairs = ["i%d, s%d" % (dim, dim) for dim in range(nd)]
    terms = ["i%d * s%d" % (dim, dim) for dim in range(nd)]
    proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (
        name, ", ".join(pairs), " + ".join(terms)))
|
||||
|
||||
|
||||
def buf_lookup_c_code(proto, defin, name, nd):
    """
    Similar to strided lookup, but the last (contiguous) dimension needs
    no stride multiplication. The signature is kept identical for now.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
        return
    arglist = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(nd - 1))
    # Last index is added after the cast, i.e. in units of the item type.
    proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (
        name, arglist, byte_offset, nd - 1))
|
||||
|
||||
|
||||
def buf_lookup_fortran_code(proto, defin, name, nd):
    """
    Like C lookup, but the first index is the one that needs no stride
    multiplication (Fortran-contiguous layout).
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
        return
    arglist = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(1, nd))
    # First index is added after the cast, i.e. in units of the item type.
    proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (
        name, arglist, byte_offset, 0))
|
||||
|
||||
|
||||
def use_py2_buffer_functions(env):
    # Emit the Python 2 emulation of PyObject_GetBuffer/PyBuffer_Release.
    env.use_utility_code(GetAndReleaseBufferUtilityCode())
|
||||
|
||||
|
||||
class GetAndReleaseBufferUtilityCode(object):
    # Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2.
    # For >= 2.6 we do double mode -- use the new buffer interface on objects
    # which has the right tp_flags set, but emulation otherwise.

    requires = None
    is_cython_utility = False

    def __init__(self):
        pass

    def __eq__(self, other):
        # All instances are interchangeable — used for utility-code dedup.
        return isinstance(other, GetAndReleaseBufferUtilityCode)

    def __hash__(self):
        # Arbitrary constant hash, consistent with __eq__ above.
        return 24342342

    def get_tree(self, **kwargs): pass

    def put_code(self, output):
        """Emit the GetAndReleaseBuffer utility code, specialized with every
        extension type reachable from this module that defines
        __getbuffer__ (and optionally __releasebuffer__)."""
        code = output['utility_code_def']
        proto_code = output['utility_code_proto']
        env = output.module_node.scope
        cython_scope = env.context.cython_scope

        # Search all types for __getbuffer__ overloads
        types = []
        visited_scopes = set()
        def find_buffer_types(scope):
            # Depth-first over cimported modules; guard against cycles.
            if scope in visited_scopes:
                return
            visited_scopes.add(scope)
            for m in scope.cimported_modules:
                find_buffer_types(m)
            for e in scope.type_entries:
                if isinstance(e.utility_code_definition, CythonUtilityCode):
                    continue
                t = e.type
                if t.is_extension_type:
                    # Skip unused entries from the internal cython scope.
                    if scope is cython_scope and not e.used:
                        continue
                    release = get = None
                    for x in t.scope.pyfunc_entries:
                        if x.name == u"__getbuffer__": get = x.func_cname
                        elif x.name == u"__releasebuffer__": release = x.func_cname
                    if get:
                        types.append((t.typeptr_cname, get, release))

        find_buffer_types(env)

        util_code = TempitaUtilityCode.load(
            "GetAndReleaseBuffer", from_file="Buffer.c",
            context=dict(types=types))

        proto = util_code.format_code(util_code.proto)
        impl = util_code.format_code(
            util_code.inject_string_constants(util_code.impl, output)[1])

        proto_code.putln(proto)
        code.putln(impl)
|
||||
|
||||
|
||||
def mangle_dtype_name(dtype):
    """Return a name for *dtype* suitable for mangling into C identifiers.

    Prefixes separate user-defined typedef/struct names from builtins
    (consider "typedef float unsigned_int").
    """
    if dtype.is_pyobject:
        return "object"
    if dtype.is_ptr:
        return "ptr"
    prefix = "nn_" if (dtype.is_typedef or dtype.is_struct_or_union) else ""
    return prefix + dtype.specialization_name()
|
||||
|
||||
def get_type_information_cname(code, dtype, maxdepth=None):
    """
    Output the run-time type information (__Pyx_TypeInfo) for given dtype,
    and return the name of the type info struct.

    Structs with two floats of the same size are encoded as complex numbers.
    One can separate between complex numbers declared as struct or with native
    encoding by inspecting to see if the fields field of the type is
    filled in.
    """
    namesuffix = mangle_dtype_name(dtype)
    name = "__Pyx_TypeInfo_%s" % namesuffix
    structinfo_name = "__Pyx_StructFields_%s" % namesuffix

    if dtype.is_error: return "<error>"

    # It's critical that walking the type info doesn't use more stack
    # depth than dtype.struct_nesting_depth() returns, so use an assertion for this
    if maxdepth is None: maxdepth = dtype.struct_nesting_depth()
    if maxdepth <= 0:
        assert False

    # Emit each type-info struct only once per module.
    if name not in code.globalstate.utility_codes:
        code.globalstate.utility_codes.add(name)
        typecode = code.globalstate['typeinfo']

        # Peel off array dimensions, collecting their sizes; 'dtype' is
        # rebound to the element type below.
        arraysizes = []
        if dtype.is_array:
            while dtype.is_array:
                arraysizes.append(dtype.size)
                dtype = dtype.base_type

        complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()

        declcode = dtype.empty_declaration_code()
        if dtype.is_simple_buffer_dtype():
            # Simple dtypes carry no field table.
            structinfo_name = "NULL"
        elif dtype.is_struct:
            struct_scope = dtype.scope
            if dtype.is_const:
                struct_scope = struct_scope.const_base_type_scope
            # Must pre-call all used types in order not to recurse during utility code writing.
            fields = struct_scope.var_entries
            assert len(fields) > 0
            types = [get_type_information_cname(code, f.type, maxdepth - 1)
                     for f in fields]
            typecode.putln("static __Pyx_StructField %s[] = {" % structinfo_name, safe=True)
            for f, typeinfo in zip(fields, types):
                typecode.putln(' {&%s, "%s", offsetof(%s, %s)},' %
                               (typeinfo, f.name, dtype.empty_declaration_code(), f.cname), safe=True)
            # NULL sentinel terminates the field table.
            typecode.putln(' {NULL, NULL, 0}', safe=True)
            typecode.putln("};", safe=True)
        else:
            assert False

        rep = str(dtype)

        flags = "0"
        is_unsigned = "0"
        # Classify the dtype into a one-character type group used by the
        # buffer format checking code.
        if dtype is PyrexTypes.c_char_type:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "'H'"
        elif dtype.is_int:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "%s ? 'U' : 'I'" % is_unsigned
        elif complex_possible or dtype.is_complex:
            typegroup = "'C'"
        elif dtype.is_float:
            typegroup = "'R'"
        elif dtype.is_struct:
            typegroup = "'S'"
            if dtype.packed:
                flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
        elif dtype.is_pyobject:
            typegroup = "'O'"
        else:
            assert False, dtype

        typeinfo = ('static __Pyx_TypeInfo %s = '
                    '{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };')
        tup = (name, rep, structinfo_name, declcode,
               ', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes),
               typegroup, is_unsigned, flags)
        typecode.putln(typeinfo % tup, safe=True)

    return name
|
||||
|
||||
def load_buffer_utility(util_code_name, context=None, **kwargs):
    """Load a named utility-code section from Buffer.c, via the Tempita
    loader when a template *context* is supplied."""
    if context is not None:
        return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)
    return UtilityCode.load(util_code_name, "Buffer.c", **kwargs)
|
||||
|
||||
# Shared Tempita context for the buffer utility code (max buffer dims).
context = dict(max_dims=Options.buffer_max_dims)
buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context)
buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs")

# Utility function to set the right exception
# The caller should immediately goto_error
raise_indexerror_code = load_buffer_utility("BufferIndexError")
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")
raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")

# Buffer acquisition/validation helpers and format-string checking.
acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context)
buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context)

# See utility code BufferFormatFromTypeInfo
_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat")
|
@ -0,0 +1,444 @@
|
||||
#
|
||||
# Builtin Definitions
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Symtab import BuiltinScope, StructOrUnionScope
|
||||
from .Code import UtilityCode
|
||||
from .TypeSlots import Signature
|
||||
from . import PyrexTypes
|
||||
from . import Options
|
||||
|
||||
|
||||
# C-level implementations of builtin types, functions and methods
|
||||
|
||||
# Preloaded utility-code sections backing the builtin implementations below.
iter_next_utility_code = UtilityCode.load("IterNext", "ObjectHandling.c")
getattr_utility_code = UtilityCode.load("GetAttr", "ObjectHandling.c")
getattr3_utility_code = UtilityCode.load("GetAttr3", "Builtins.c")
pyexec_utility_code = UtilityCode.load("PyExec", "Builtins.c")
pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c")
globals_utility_code = UtilityCode.load("Globals", "Builtins.c")

# Utility code required by specific builtin names, keyed by name.
builtin_utility_code = {
    'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"),
}
|
||||
|
||||
|
||||
# mapping from builtins to their C-level equivalents
|
||||
|
||||
class _BuiltinOverride(object):
|
||||
def __init__(self, py_name, args, ret_type, cname, py_equiv="*",
|
||||
utility_code=None, sig=None, func_type=None,
|
||||
is_strict_signature=False, builtin_return_type=None):
|
||||
self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv
|
||||
self.args, self.ret_type = args, ret_type
|
||||
self.func_type, self.sig = func_type, sig
|
||||
self.builtin_return_type = builtin_return_type
|
||||
self.is_strict_signature = is_strict_signature
|
||||
self.utility_code = utility_code
|
||||
|
||||
def build_func_type(self, sig=None, self_arg=None):
|
||||
if sig is None:
|
||||
sig = Signature(self.args, self.ret_type)
|
||||
sig.exception_check = False # not needed for the current builtins
|
||||
func_type = sig.function_type(self_arg)
|
||||
if self.is_strict_signature:
|
||||
func_type.is_strict_signature = True
|
||||
if self.builtin_return_type:
|
||||
func_type.return_type = builtin_types[self.builtin_return_type]
|
||||
return func_type
|
||||
|
||||
|
||||
class BuiltinAttribute(object):
    """A data attribute exposed on a builtin type."""

    def __init__(self, py_name, cname=None, field_type=None, field_type_name=None):
        self.py_name = py_name
        self.cname = cname or py_name
        # Name-based lookup is deferred: the type may not be declared yet.
        self.field_type_name = field_type_name
        self.field_type = field_type

    def declare_in_type(self, self_type):
        """Declare this attribute as a private variable in the scope of
        *self_type*."""
        if self.field_type_name is None:
            field_type = self.field_type or PyrexTypes.py_object_type
        else:
            # lazy type lookup — resolve by name now that types exist
            field_type = builtin_scope.lookup(self.field_type_name).type
        entry = self_type.scope.declare(
            self.py_name, self.cname, field_type, None, 'private')
        entry.is_variable = True
|
||||
|
||||
|
||||
class BuiltinFunction(_BuiltinOverride):
    """A module-level builtin function override."""

    def declare_in_scope(self, scope):
        # Build the function type lazily from the signature string when no
        # explicit C function type was provided.
        func_type = self.func_type
        if func_type is None:
            func_type = self.build_func_type(self.sig)
        scope.declare_builtin_cfunction(
            self.py_name, func_type, self.cname, self.py_equiv, self.utility_code)
|
||||
|
||||
|
||||
class BuiltinMethod(_BuiltinOverride):
    """A method override on a builtin type."""

    def declare_in_type(self, self_type):
        method_type = self.func_type
        if method_type is None:
            # Override 'self' type (first argument) with the builtin type.
            self_arg = PyrexTypes.CFuncTypeArg("", self_type, None)
            self_arg.not_none = True
            self_arg.accept_builtin_subtypes = True
            method_type = self.build_func_type(self.sig, self_arg)
        self_type.scope.declare_builtin_cfunction(
            self.py_name, method_type, self.cname, utility_code=self.utility_code)
|
||||
|
||||
|
||||
# Table of builtin functions that Cython can map directly onto C/CPython
# calls.  Each entry names the Python builtin, its signature format codes,
# the C function to call, an optional Python-level equivalent, and any
# utility code that must be emitted alongside the call.
builtin_function_table = [
    # name, args, return, C API func, py equiv = "*"
    BuiltinFunction('abs', "d", "d", "fabs",
                    is_strict_signature = True),
    BuiltinFunction('abs', "f", "f", "fabsf",
                    is_strict_signature = True),
    BuiltinFunction('abs', "i", "i", "abs",
                    is_strict_signature = True),
    BuiltinFunction('abs', "l", "l", "labs",
                    is_strict_signature = True),
    BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
                    utility_code = UtilityCode.load("abs_longlong", "Builtins.c"),
                    func_type = PyrexTypes.CFuncType(
                        PyrexTypes.c_longlong_type, [
                            PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None)
                        ],
                        is_strict_signature = True, nogil=True)),
] + list(
    # abs() of an unsigned integer is the identity; the cname is only a
    # comment placeholder in the generated C code.
    BuiltinFunction('abs', None, None, "/*abs_{0}*/".format(t.specialization_name()),
                    func_type = PyrexTypes.CFuncType(
                        t,
                        [PyrexTypes.CFuncTypeArg("arg", t, None)],
                        is_strict_signature = True, nogil=True))
    for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type)
) + list(
    # abs() of a complex number returns its real-typed magnitude.
    BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix),
                    func_type = PyrexTypes.CFuncType(
                        t.real_type, [
                            PyrexTypes.CFuncTypeArg("arg", t, None)
                        ],
                        is_strict_signature = True, nogil=True))
    for t in (PyrexTypes.c_float_complex_type,
              PyrexTypes.c_double_complex_type,
              PyrexTypes.c_longdouble_complex_type)
) + [
    BuiltinFunction('abs', "O", "O", "__Pyx_PyNumber_Absolute",
                    utility_code=UtilityCode.load("py_abs", "Builtins.c")),
    #('all', "", "", ""),
    #('any', "", "", ""),
    #('ascii', "", "", ""),
    #('bin', "", "", ""),
    BuiltinFunction('callable', "O", "b", "__Pyx_PyCallable_Check",
                    utility_code = UtilityCode.load("CallableCheck", "ObjectHandling.c")),
    #('chr', "", "", ""),
    #('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
    #('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
    BuiltinFunction('delattr', "OO", "r", "PyObject_DelAttr"),
    BuiltinFunction('dir', "O", "O", "PyObject_Dir"),
    BuiltinFunction('divmod', "OO", "O", "PyNumber_Divmod"),
    BuiltinFunction('exec', "O", "O", "__Pyx_PyExecGlobals",
                    utility_code = pyexec_globals_utility_code),
    BuiltinFunction('exec', "OO", "O", "__Pyx_PyExec2",
                    utility_code = pyexec_utility_code),
    BuiltinFunction('exec', "OOO", "O", "__Pyx_PyExec3",
                    utility_code = pyexec_utility_code),
    #('eval', "", "", ""),
    #('execfile', "", "", ""),
    #('filter', "", "", ""),
    BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr",
                    utility_code=getattr3_utility_code), # Pyrex legacy
    BuiltinFunction('getattr', "OOO", "O", "__Pyx_GetAttr3",
                    utility_code=getattr3_utility_code),
    BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr",
                    utility_code=getattr_utility_code),
    BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
                    utility_code = UtilityCode.load("HasAttr", "Builtins.c")),
    BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
    #('hex', "", "", ""),
    #('id', "", "", ""),
    #('input', "", "", ""),
    BuiltinFunction('intern', "O", "O", "__Pyx_Intern",
                    utility_code = UtilityCode.load("Intern", "Builtins.c")),
    BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance"),
    BuiltinFunction('issubclass', "OO", "b", "PyObject_IsSubclass"),
    BuiltinFunction('iter', "OO", "O", "PyCallIter_New"),
    BuiltinFunction('iter', "O", "O", "PyObject_GetIter"),
    BuiltinFunction('len', "O", "z", "PyObject_Length"),
    BuiltinFunction('locals', "", "O", "__pyx_locals"),
    #('map', "", "", ""),
    #('max', "", "", ""),
    #('min', "", "", ""),
    BuiltinFunction('next', "O", "O", "__Pyx_PyIter_Next",
                    utility_code = iter_next_utility_code), # not available in Py2 => implemented here
    BuiltinFunction('next', "OO", "O", "__Pyx_PyIter_Next2",
                    utility_code = iter_next_utility_code), # not available in Py2 => implemented here
    #('oct', "", "", ""),
    #('open', "ss", "O", "PyFile_FromString"), # not in Py3
] + [
    # ord() of the unicode character types: a plain C cast to long.
    BuiltinFunction('ord', None, None, "__Pyx_long_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_py_ucs4_type, PyrexTypes.c_py_unicode_type]
] + [
    # ord() of the C character types: cast to unsigned char.
    BuiltinFunction('ord', None, None, "__Pyx_uchar_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_uchar_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_char_type, PyrexTypes.c_schar_type, PyrexTypes.c_uchar_type]
] + [
    # Generic ord() of an arbitrary object; returns -1 (cast) on error.
    BuiltinFunction('ord', None, None, "__Pyx_PyObject_Ord",
                    utility_code=UtilityCode.load_cached("object_ord", "Builtins.c"),
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [
                            PyrexTypes.CFuncTypeArg("c", PyrexTypes.py_object_type, None)
                        ],
                        exception_value="(long)(Py_UCS4)-1")),
    BuiltinFunction('pow', "OOO", "O", "PyNumber_Power"),
    BuiltinFunction('pow', "OO", "O", "__Pyx_PyNumber_Power2",
                    utility_code = UtilityCode.load("pow2", "Builtins.c")),
    #('range', "", "", ""),
    #('raw_input', "", "", ""),
    #('reduce', "", "", ""),
    BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"),
    BuiltinFunction('repr', "O", "O", "PyObject_Repr"), # , builtin_return_type='str'), # add in Cython 3.1
    #('round', "", "", ""),
    BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"),
    #('sum', "", "", ""),
    #('sorted', "", "", ""),
    #('type', "O", "O", "PyObject_Type"),
    #('unichr', "", "", ""),
    #('unicode', "", "", ""),
    #('vars', "", "", ""),
    #('zip', "", "", ""),
    # Can't do these easily until we have builtin type entries.
    #('typecheck', "OO", "i", "PyObject_TypeCheck", False),
    #('issubtype', "OO", "i", "PyType_IsSubtype", False),

    # Put in namespace append optimization.
    BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),

    # This is conditionally looked up based on a compiler directive.
    BuiltinFunction('__Pyx_Globals', "", "O", "__Pyx_Globals",
                    utility_code=globals_utility_code),
]
|
||||
|
||||
|
||||
# Builtin types
|
||||
# bool
|
||||
# buffer
|
||||
# classmethod
|
||||
# dict
|
||||
# enumerate
|
||||
# file
|
||||
# float
|
||||
# int
|
||||
# list
|
||||
# long
|
||||
# object
|
||||
# property
|
||||
# slice
|
||||
# staticmethod
|
||||
# super
|
||||
# str
|
||||
# tuple
|
||||
# type
|
||||
# xrange
|
||||
|
||||
# Table of builtin types known to the compiler:
# (python name, C type object cname, list of BuiltinMethod/BuiltinAttribute).
builtin_types_table = [

    ("type", "PyType_Type", []),

    # This conflicts with the C++ bool type, and unfortunately
    # C++ is too liberal about PyObject* <-> bool conversions,
    # resulting in unintuitive runtime behavior and segfaults.
    # ("bool", "PyBool_Type", []),

    ("int", "PyInt_Type", []),
    ("long", "PyLong_Type", []),
    ("float", "PyFloat_Type", []),

    ("complex", "PyComplex_Type", [BuiltinAttribute('cval', field_type_name = 'Py_complex'),
                                   # real/imag alias directly into the cval struct field.
                                   BuiltinAttribute('real', 'cval.real', field_type = PyrexTypes.c_double_type),
                                   BuiltinAttribute('imag', 'cval.imag', field_type = PyrexTypes.c_double_type),
                                   ]),

    ("basestring", "PyBaseString_Type", [
        BuiltinMethod("join", "TO", "T", "__Pyx_PyBaseString_Join",
                      utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
    ]),
    ("bytearray", "PyByteArray_Type", [
    ]),
    ("bytes", "PyBytes_Type", [BuiltinMethod("join", "TO", "O", "__Pyx_PyBytes_Join",
                                             utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
                               ]),
    ("str", "PyString_Type", [BuiltinMethod("join", "TO", "O", "__Pyx_PyString_Join",
                                            builtin_return_type='basestring',
                                            utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
                              ]),
    ("unicode", "PyUnicode_Type", [BuiltinMethod("__contains__", "TO", "b", "PyUnicode_Contains"),
                                   BuiltinMethod("join", "TO", "T", "PyUnicode_Join"),
                                   ]),

    ("tuple", "PyTuple_Type", []),

    ("list", "PyList_Type", [BuiltinMethod("insert", "TzO", "r", "PyList_Insert"),
                             BuiltinMethod("reverse", "T", "r", "PyList_Reverse"),
                             BuiltinMethod("append", "TO", "r", "__Pyx_PyList_Append",
                                           utility_code=UtilityCode.load("ListAppend", "Optimize.c")),
                             BuiltinMethod("extend", "TO", "r", "__Pyx_PyList_Extend",
                                           utility_code=UtilityCode.load("ListExtend", "Optimize.c")),
                             ]),

    ("dict", "PyDict_Type", [BuiltinMethod("__contains__", "TO", "b", "PyDict_Contains"),
                             BuiltinMethod("has_key", "TO", "b", "PyDict_Contains"),
                             BuiltinMethod("items", "T", "O", "__Pyx_PyDict_Items",
                                           utility_code=UtilityCode.load("py_dict_items", "Builtins.c")),
                             BuiltinMethod("keys", "T", "O", "__Pyx_PyDict_Keys",
                                           utility_code=UtilityCode.load("py_dict_keys", "Builtins.c")),
                             BuiltinMethod("values", "T", "O", "__Pyx_PyDict_Values",
                                           utility_code=UtilityCode.load("py_dict_values", "Builtins.c")),
                             BuiltinMethod("iteritems", "T", "O", "__Pyx_PyDict_IterItems",
                                           utility_code=UtilityCode.load("py_dict_iteritems", "Builtins.c")),
                             BuiltinMethod("iterkeys", "T", "O", "__Pyx_PyDict_IterKeys",
                                           utility_code=UtilityCode.load("py_dict_iterkeys", "Builtins.c")),
                             BuiltinMethod("itervalues", "T", "O", "__Pyx_PyDict_IterValues",
                                           utility_code=UtilityCode.load("py_dict_itervalues", "Builtins.c")),
                             BuiltinMethod("viewitems", "T", "O", "__Pyx_PyDict_ViewItems",
                                           utility_code=UtilityCode.load("py_dict_viewitems", "Builtins.c")),
                             BuiltinMethod("viewkeys", "T", "O", "__Pyx_PyDict_ViewKeys",
                                           utility_code=UtilityCode.load("py_dict_viewkeys", "Builtins.c")),
                             BuiltinMethod("viewvalues", "T", "O", "__Pyx_PyDict_ViewValues",
                                           utility_code=UtilityCode.load("py_dict_viewvalues", "Builtins.c")),
                             BuiltinMethod("clear", "T", "r", "__Pyx_PyDict_Clear",
                                           utility_code=UtilityCode.load("py_dict_clear", "Optimize.c")),
                             BuiltinMethod("copy", "T", "T", "PyDict_Copy")]),

    ("slice", "PySlice_Type", [BuiltinAttribute('start'),
                               BuiltinAttribute('stop'),
                               BuiltinAttribute('step'),
                               ]),
    # ("file", "PyFile_Type", []), # not in Py3

    ("set", "PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear"),
                           # discard() and remove() have a special treatment for unhashable values
                           BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
                                         utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
                           BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
                                         utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
                           # update is actually variadic (see Github issue #1645)
                           # BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
                           #               utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
                           BuiltinMethod("add", "TO", "r", "PySet_Add"),
                           BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
    ("frozenset", "PyFrozenSet_Type", []),
    # The exception types have no direct PyTypeObject name; dereference the
    # PyObject* exception objects instead.
    ("Exception", "((PyTypeObject*)PyExc_Exception)[0]", []),
    ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []),
]
|
||||
|
||||
|
||||
# Builtin types whose constructor call is guaranteed to return an instance
# of exactly that type (rather than an arbitrary object), which allows
# type inference on the call result.
# Idiom fix: use a set literal instead of set([...]) (avoids building a
# throwaway list, flake8-comprehensions C405).
types_that_construct_their_instance = {
    'type', 'bool', 'long', 'float', 'complex',
    'bytes', 'unicode', 'bytearray',
    'tuple', 'list', 'dict', 'set', 'frozenset',
    # 'str',  # only in Py3.x
    # 'file', # only in Py2.x
}
|
||||
|
||||
|
||||
# C structs exposed to Cython code: (name, cname, [(field name, type), ...]).
builtin_structs_table = [
    # The PEP 3118 buffer descriptor.
    ('Py_buffer', 'Py_buffer',
     [("buf", PyrexTypes.c_void_ptr_type),
      ("obj", PyrexTypes.py_object_type),
      ("len", PyrexTypes.c_py_ssize_t_type),
      ("itemsize", PyrexTypes.c_py_ssize_t_type),
      ("readonly", PyrexTypes.c_bint_type),
      ("ndim", PyrexTypes.c_int_type),
      ("format", PyrexTypes.c_char_ptr_type),
      ("shape", PyrexTypes.c_py_ssize_t_ptr_type),
      ("strides", PyrexTypes.c_py_ssize_t_ptr_type),
      ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type),
      ("smalltable", PyrexTypes.CArrayType(PyrexTypes.c_py_ssize_t_type, 2)),
      ("internal", PyrexTypes.c_void_ptr_type),
      ]),
    # CPython's C-level complex number representation.
    ('Py_complex', 'Py_complex',
     [('real', PyrexTypes.c_double_type),
      ('imag', PyrexTypes.c_double_type),
      ])
]
|
||||
|
||||
# set up builtin scope

# The single scope holding all builtin declarations for a compilation.
builtin_scope = BuiltinScope()

def init_builtin_funcs():
    """Declare every entry of builtin_function_table in the builtin scope."""
    for bf in builtin_function_table:
        bf.declare_in_scope(builtin_scope)
|
||||
|
||||
# Mapping of builtin type name -> declared type object, filled in by
# init_builtin_types().
builtin_types = {}

def init_builtin_types():
    """Declare all builtin types and their methods in the builtin scope."""
    global builtin_types
    # Types whose C object struct name does not follow the regular
    # 'Py%sObject' % name.capitalize() pattern ('bool' has no struct).
    irregular_struct_names = {
        'frozenset': 'PySetObject',
        'bytearray': 'PyByteArrayObject',
        'bool': None,
        'Exception': 'PyBaseExceptionObject',
        'StopAsyncIteration': 'PyBaseExceptionObject',
    }
    for name, cname, methods in builtin_types_table:
        utility = builtin_utility_code.get(name)
        if name in irregular_struct_names:
            objstruct_cname = irregular_struct_names[name]
        else:
            objstruct_cname = 'Py%sObject' % name.capitalize()
        the_type = builtin_scope.declare_builtin_type(name, cname, utility, objstruct_cname)
        builtin_types[name] = the_type
        for method in methods:
            method.declare_in_type(the_type)
|
||||
|
||||
def init_builtin_structs():
    """Declare the known C structs (Py_buffer, Py_complex) in the builtin scope."""
    for struct_name, struct_cname, field_specs in builtin_structs_table:
        member_scope = StructOrUnionScope(struct_name)
        for field_name, field_type in field_specs:
            member_scope.declare_var(
                field_name, field_type, None, field_name, allow_pyobject=True)
        builtin_scope.declare_struct_or_union(
            struct_name, "struct", member_scope, 1, None, cname=struct_cname)
|
||||
|
||||
|
||||
def init_builtins():
    """Populate the builtin scope and the module-level *_type globals.

    Structs are declared first since builtin types may reference them
    (e.g. complex.cval -> Py_complex), then types, then functions.
    """
    init_builtin_structs()
    init_builtin_types()
    init_builtin_funcs()

    # '__debug__' maps onto the inverse of CPython's -O flag.
    builtin_scope.declare_var(
        '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type),
        pos=None, cname='(!Py_OptimizeFlag)', is_cdef=True)

    # Cache the frequently used builtin types as module-level globals.
    global list_type, tuple_type, dict_type, set_type, frozenset_type
    global bytes_type, str_type, unicode_type, basestring_type, slice_type
    global float_type, bool_type, type_type, complex_type, bytearray_type
    type_type = builtin_scope.lookup('type').type
    list_type = builtin_scope.lookup('list').type
    tuple_type = builtin_scope.lookup('tuple').type
    dict_type = builtin_scope.lookup('dict').type
    set_type = builtin_scope.lookup('set').type
    frozenset_type = builtin_scope.lookup('frozenset').type
    slice_type = builtin_scope.lookup('slice').type
    bytes_type = builtin_scope.lookup('bytes').type
    str_type = builtin_scope.lookup('str').type
    unicode_type = builtin_scope.lookup('unicode').type
    basestring_type = builtin_scope.lookup('basestring').type
    bytearray_type = builtin_scope.lookup('bytearray').type
    float_type = builtin_scope.lookup('float').type
    bool_type = builtin_scope.lookup('bool').type
    complex_type = builtin_scope.lookup('complex').type


# Initialise eagerly at import time.
init_builtins()
|
@ -0,0 +1,240 @@
|
||||
#
|
||||
# Cython - Command Line Parsing
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
from . import Options
|
||||
|
||||
usage = """\
|
||||
Cython (http://cython.org) is a compiler for code written in the
|
||||
Cython language. Cython is based on Pyrex by Greg Ewing.
|
||||
|
||||
Usage: cython [options] sourcefile.{pyx,py} ...
|
||||
|
||||
Options:
|
||||
-V, --version Display version number of cython compiler
|
||||
-l, --create-listing Write error messages to a listing file
|
||||
-I, --include-dir <directory> Search for include files in named directory
|
||||
(multiple include directories are allowed).
|
||||
-o, --output-file <filename> Specify name of generated C file
|
||||
-t, --timestamps Only compile newer source files
|
||||
-f, --force Compile all source files (overrides implied -t)
|
||||
-v, --verbose Be verbose, print file names on multiple compilation
|
||||
-p, --embed-positions If specified, the positions in Cython files of each
|
||||
function definition is embedded in its docstring.
|
||||
--cleanup <level> Release interned objects on python exit, for memory debugging.
|
||||
Level indicates aggressiveness, default 0 releases nothing.
|
||||
-w, --working <directory> Sets the working directory for Cython (the directory modules
|
||||
are searched from)
|
||||
--gdb Output debug information for cygdb
|
||||
--gdb-outdir <directory> Specify gdb debug information output directory. Implies --gdb.
|
||||
|
||||
-D, --no-docstrings Strip docstrings from the compiled module.
|
||||
-a, --annotate Produce a colorized HTML version of the source.
|
||||
--annotate-coverage <cov.xml> Annotate and include coverage information from cov.xml.
|
||||
--line-directives Produce #line directives pointing to the .pyx source
|
||||
--cplus Output a C++ rather than C file.
|
||||
--embed[=<method_name>] Generate a main() function that embeds the Python interpreter.
|
||||
-2 Compile based on Python-2 syntax and code semantics.
|
||||
-3 Compile based on Python-3 syntax and code semantics.
|
||||
--3str Compile based on Python-3 syntax and code semantics without
|
||||
assuming unicode by default for string literals under Python 2.
|
||||
--lenient Change some compile time errors to runtime errors to
|
||||
improve Python compatibility
|
||||
--capi-reexport-cincludes Add cincluded headers to any auto-generated header files.
|
||||
--fast-fail Abort the compilation on the first error
|
||||
--warning-errors, -Werror Make all warnings into errors
|
||||
--warning-extra, -Wextra Enable extra warnings
|
||||
-X, --directive <name>=<value>[,<name=value,...] Overrides a compiler directive
|
||||
-E, --compile-time-env name=value[,<name=value,...] Provides compile time env like DEF would do.
|
||||
--module-name Fully qualified module name. If not given, it is deduced from the
|
||||
import path if source file is in a package, or equals the
|
||||
filename otherwise.
|
||||
-M, --depfile Produce depfiles for the sources
|
||||
"""
|
||||
|
||||
|
||||
# The following experimental options are supported only on MacOSX:
|
||||
# -C, --compile Compile generated .c file to .o file
|
||||
# --link Link .o file to produce extension module (implies -C)
|
||||
# -+, --cplus Use C++ compiler for compiling and linking
|
||||
# Additional .o files to link may be supplied when using -X."""
|
||||
|
||||
def bad_usage():
    """Print the usage text to stderr and terminate with exit status 1."""
    sys.stderr.write(usage)
    sys.exit(1)
|
||||
|
||||
def parse_command_line(args):
    """Parse the cython command line.

    Consumes *args* (a list of argument strings, typically sys.argv[1:],
    mutated in place) and returns a tuple ``(options, sources)`` where
    *options* is a CompilationOptions instance and *sources* is the list
    of source file names.  Global flags are stored on the Options module.

    On any invalid option, missing value, or disallowed combination an
    error and/or the usage text is written to stderr and the process
    exits with status 1.
    """
    from .Main import CompilationOptions, default_options

    # Holds the value part of a "--long-option=value" argument until the
    # matching pop_value() call consumes it.
    pending_arg = []

    def pop_arg():
        # Pop the next argument, splitting "--long-option=xyz" so the
        # option name is returned and the value is parked in pending_arg.
        if not args or pending_arg:
            bad_usage()
        if '=' in args[0] and args[0].startswith('--'):  # allow "--long-option=xyz"
            name, value = args.pop(0).split('=', 1)
            pending_arg.append(value)
            return name
        return args.pop(0)

    def pop_value(default=None):
        # An option's value: a parked "=value", the given default, or the
        # next command line argument.
        if pending_arg:
            return pending_arg.pop()
        elif default is not None:
            return default
        elif not args:
            bad_usage()
        return args.pop(0)

    def get_param(option):
        # A short option's value: attached ("-Ifoo") or the next argument.
        tail = option[2:]
        if tail:
            return tail
        else:
            return pop_arg()

    options = CompilationOptions(default_options)
    sources = []
    while args:
        if args[0].startswith("-"):
            option = pop_arg()
            if option in ("-V", "--version"):
                options.show_version = 1
            elif option in ("-l", "--create-listing"):
                options.use_listing_file = 1
            elif option in ("-+", "--cplus"):
                options.cplus = 1
            elif option == "--embed":
                Options.embed = pop_value("main")
            elif option.startswith("-I"):
                options.include_path.append(get_param(option))
            elif option == "--include-dir":
                options.include_path.append(pop_value())
            elif option in ("-w", "--working"):
                options.working_path = pop_value()
            elif option in ("-o", "--output-file"):
                options.output_file = pop_value()
            elif option in ("-t", "--timestamps"):
                options.timestamps = 1
            elif option in ("-f", "--force"):
                options.timestamps = 0
            elif option in ("-v", "--verbose"):
                options.verbose += 1
            elif option in ("-p", "--embed-positions"):
                Options.embed_pos_in_docstring = 1
            elif option in ("-z", "--pre-import"):
                Options.pre_import = pop_value()
            elif option == "--cleanup":
                Options.generate_cleanup_code = int(pop_value())
            elif option in ("-D", "--no-docstrings"):
                Options.docstrings = False
            elif option in ("-a", "--annotate"):
                Options.annotate = True
            elif option == "--annotate-coverage":
                Options.annotate = True
                Options.annotate_coverage_xml = pop_value()
            elif option == "--convert-range":
                Options.convert_range = True
            elif option == "--line-directives":
                options.emit_linenums = True
            elif option == "--no-c-in-traceback":
                options.c_line_in_traceback = False
            elif option == "--gdb":
                options.gdb_debug = True
                options.output_dir = os.curdir
            elif option == "--gdb-outdir":
                options.gdb_debug = True
                options.output_dir = pop_value()
            elif option == "--lenient":
                Options.error_on_unknown_names = False
                Options.error_on_uninitialized = False
            elif option == '-2':
                options.language_level = 2
            elif option == '-3':
                options.language_level = 3
            elif option == '--3str':
                options.language_level = '3str'
            elif option == "--capi-reexport-cincludes":
                options.capi_reexport_cincludes = True
            elif option == "--fast-fail":
                Options.fast_fail = True
            elif option == "--cimport-from-pyx":
                Options.cimport_from_pyx = True
            elif option in ('-Werror', '--warning-errors'):
                Options.warning_errors = True
            elif option in ('-Wextra', '--warning-extra'):
                options.compiler_directives.update(Options.extra_warnings)
            elif option == "--old-style-globals":
                Options.old_style_globals = True
            elif option == "--directive" or option.startswith('-X'):
                if option.startswith('-X') and option[2:].strip():
                    x_args = option[2:]
                else:
                    x_args = pop_value()
                try:
                    options.compiler_directives = Options.parse_directive_list(
                        x_args, relaxed_bool=True,
                        current_settings=options.compiler_directives)
                except ValueError as e:
                    sys.stderr.write("Error in compiler directive: %s\n" % e.args[0])
                    sys.exit(1)
            elif option == "--compile-time-env" or option.startswith('-E'):
                if option.startswith('-E') and option[2:].strip():
                    x_args = option[2:]
                else:
                    x_args = pop_value()
                try:
                    options.compile_time_env = Options.parse_compile_time_env(
                        x_args, current_settings=options.compile_time_env)
                except ValueError as e:
                    sys.stderr.write("Error in compile-time-env: %s\n" % e.args[0])
                    sys.exit(1)
            elif option == "--module-name":
                options.module_name = pop_value()
            elif option in ('-M', '--depfile'):
                options.depfile = True
            elif option.startswith('--debug'):
                # Map "--debug-foo-bar" onto the DebugFlags.debug_foo_bar attribute.
                option = option[2:].replace('-', '_')
                from . import DebugFlags
                if option in dir(DebugFlags):
                    setattr(DebugFlags, option, True)
                else:
                    sys.stderr.write("Unknown debug flag: %s\n" % option)
                    bad_usage()
            elif option in ('-h', '--help'):
                sys.stdout.write(usage)
                sys.exit(0)
            else:
                sys.stderr.write(usage)
                sys.stderr.write("Unknown compiler flag: %s\n" % option)
                sys.exit(1)
        else:
            sources.append(pop_arg())

    if pending_arg:
        # A "--option=value" value was never consumed by an option handler.
        bad_usage()

    # Fix: this check guards the listing-file option (-l/--create-listing),
    # but the previous message wrongly claimed it was about -o.
    if options.use_listing_file and len(sources) > 1:
        sys.stderr.write(
            "cython: Only one source file allowed when using -l/--create-listing\n")
        sys.exit(1)
    if len(sources) == 0 and not options.show_version:
        bad_usage()
    if Options.embed and len(sources) > 1:
        sys.stderr.write(
            "cython: Only one source file allowed when using --embed\n")
        sys.exit(1)
    if options.module_name:
        if options.timestamps:
            sys.stderr.write(
                "cython: Cannot use --module-name with --timestamps\n")
            sys.exit(1)
        if len(sources) > 1:
            sys.stderr.write(
                "cython: Only one source file allowed when using --module-name\n")
            sys.exit(1)
    return options, sources
|
124
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Code.pxd
Normal file
124
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Code.pxd
Normal file
@ -0,0 +1,124 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
cimport cython
|
||||
from ..StringIOTree cimport StringIOTree
|
||||
|
||||
|
||||
# Static type declarations for the classes implemented in Code.py.

cdef class UtilityCodeBase(object):
    cpdef format_code(self, code_string, replace_empty_lines=*)


cdef class UtilityCode(UtilityCodeBase):
    cdef public object name
    cdef public object proto
    cdef public object impl
    cdef public object init
    cdef public object cleanup
    cdef public object proto_block
    cdef public object requires
    cdef public dict _cache
    cdef public list specialize_list
    cdef public object file

    cpdef none_or_sub(self, s, context)


# Per-function code generation state (labels, temps, error handling).
cdef class FunctionState:
    cdef public set names_taken
    cdef public object owner
    cdef public object scope

    cdef public object error_label
    cdef public size_t label_counter
    cdef public set labels_used
    cdef public object return_label
    cdef public object continue_label
    cdef public object break_label
    cdef public list yield_labels

    cdef public object return_from_error_cleanup_label # not used in __init__ ?

    cdef public object exc_vars
    cdef public object current_except
    cdef public bint in_try_finally
    cdef public bint can_trace
    cdef public bint gil_owned

    # Temporary variable bookkeeping.
    cdef public list temps_allocated
    cdef public dict temps_free
    cdef public dict temps_used_type
    cdef public set zombie_temps
    cdef public size_t temp_counter
    cdef public list collect_temps_stack

    cdef public object closure_temps
    cdef public bint should_declare_error_indicator
    cdef public bint uses_error_indicator

    @cython.locals(n=size_t)
    cpdef new_label(self, name=*)
    cpdef tuple get_loop_labels(self)
    cpdef set_loop_labels(self, labels)
    cpdef tuple get_all_labels(self)
    cpdef set_all_labels(self, labels)
    cpdef start_collecting_temps(self)
    cpdef stop_collecting_temps(self)

    cpdef list temps_in_use(self)

cdef class IntConst:
    cdef public object cname
    cdef public object value
    cdef public bint is_long

cdef class PyObjectConst:
    cdef public object cname
    cdef public object type

cdef class StringConst:
    cdef public object cname
    cdef public object text
    cdef public object escaped_value
    cdef public dict py_strings
    cdef public list py_versions

    @cython.locals(intern=bint, is_str=bint, is_unicode=bint)
    cpdef get_py_string_const(self, encoding, identifier=*, is_str=*, py3str_cstring=*)

## cdef class PyStringConst:
##     cdef public object cname
##     cdef public object encoding
##     cdef public bint is_str
##     cdef public bint is_unicode
##     cdef public bint intern

#class GlobalState(object):

#def funccontext_property(name):

# The main C code emission buffer.
cdef class CCodeWriter(object):
    cdef readonly StringIOTree buffer
    cdef readonly list pyclass_stack
    cdef readonly object globalstate
    cdef readonly object funcstate
    cdef object code_config
    cdef object last_pos
    cdef object last_marked_pos
    cdef Py_ssize_t level
    cdef public Py_ssize_t call_level  # debug-only, see Nodes.py
    cdef bint bol

    cpdef write(self, s)
    cpdef put(self, code)
    cpdef put_safe(self, code)
    cpdef putln(self, code=*, bint safe=*)
    @cython.final
    cdef increase_indent(self)
    @cython.final
    cdef decrease_indent(self)


cdef class PyrexCodeWriter:
    cdef public object f
    cdef public Py_ssize_t level
2597
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Code.py
Normal file
2597
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Code.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,35 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Visitor import VisitorTransform
|
||||
from .Nodes import StatListNode
|
||||
|
||||
|
||||
class ExtractPxdCode(VisitorTransform):
    """Collect the code-generating nodes of a pxd file.

    Applying the transform to a pxd module tree returns a tuple
    (StatListNode, ModuleScope): the StatListNode gathers every function
    definition found, and the scope is the pxd's module scope — together,
    everything needed from the pxd once it has been processed.

    Compiling the pxd separately would be a purer design, but its result
    would need to carry more than plain strings (functions plus their
    wanted interned strings, utility code and cached objects), so this
    simpler extraction is used for now.
    """

    def __call__(self, root):
        self.funcs = []
        self.visitchildren(root)
        return (StatListNode(root.pos, stats=self.funcs), root.scope)

    def visit_FuncDefNode(self, node):
        # Collect the function and do not recurse: nested function
        # definitions travel along with their enclosing node.
        self.funcs.append(node)
        return node

    def visit_Node(self, node):
        # Default case: keep walking the tree.
        self.visitchildren(node)
        return node
|
@ -0,0 +1,164 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Symtab import ModuleScope
|
||||
from .PyrexTypes import *
|
||||
from .UtilityCode import CythonUtilityCode
|
||||
from .Errors import error
|
||||
from .Scanning import StringSourceDescriptor
|
||||
from . import MemoryView
|
||||
|
||||
|
||||
class CythonScope(ModuleScope):
|
||||
is_cython_builtin = 1
|
||||
_cythonscope_initialized = False
|
||||
|
||||
def __init__(self, context):
|
||||
ModuleScope.__init__(self, u'cython', None, None)
|
||||
self.pxd_file_loaded = True
|
||||
self.populate_cython_scope()
|
||||
# The Main.Context object
|
||||
self.context = context
|
||||
|
||||
for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type):
|
||||
entry = self.declare_typedef(fused_type.name,
|
||||
fused_type,
|
||||
None,
|
||||
cname='<error>')
|
||||
entry.in_cinclude = True
|
||||
|
||||
def is_cpp(self):
|
||||
# Allow C++ utility code in C++ contexts.
|
||||
return self.context.cpp
|
||||
|
||||
def lookup_type(self, name):
|
||||
# This function should go away when types are all first-level objects.
|
||||
type = parse_basic_type(name)
|
||||
if type:
|
||||
return type
|
||||
|
||||
return super(CythonScope, self).lookup_type(name)
|
||||
|
||||
def lookup(self, name):
|
||||
entry = super(CythonScope, self).lookup(name)
|
||||
|
||||
if entry is None and not self._cythonscope_initialized:
|
||||
self.load_cythonscope()
|
||||
entry = super(CythonScope, self).lookup(name)
|
||||
|
||||
return entry
|
||||
|
||||
def find_module(self, module_name, pos):
|
||||
error("cython.%s is not available" % module_name, pos)
|
||||
|
||||
def find_submodule(self, module_name):
|
||||
entry = self.entries.get(module_name, None)
|
||||
if not entry:
|
||||
self.load_cythonscope()
|
||||
entry = self.entries.get(module_name, None)
|
||||
|
||||
if entry and entry.as_module:
|
||||
return entry.as_module
|
||||
else:
|
||||
# TODO: fix find_submodule control flow so that we're not
|
||||
# expected to create a submodule here (to protect CythonScope's
|
||||
# possible immutability). Hack ourselves out of the situation
|
||||
# for now.
|
||||
raise error((StringSourceDescriptor(u"cython", u""), 0, 0),
|
||||
"cython.%s is not available" % module_name)
|
||||
|
||||
def lookup_qualified_name(self, qname):
|
||||
# ExprNode.as_cython_attribute generates qnames and we untangle it here...
|
||||
name_path = qname.split(u'.')
|
||||
scope = self
|
||||
while len(name_path) > 1:
|
||||
scope = scope.lookup_here(name_path[0])
|
||||
if scope:
|
||||
scope = scope.as_module
|
||||
del name_path[0]
|
||||
if scope is None:
|
||||
return None
|
||||
else:
|
||||
return scope.lookup_here(name_path[0])
|
||||
|
||||
def populate_cython_scope(self):
|
||||
# These are used to optimize isinstance in FinalOptimizePhase
|
||||
type_object = self.declare_typedef(
|
||||
'PyTypeObject',
|
||||
base_type = c_void_type,
|
||||
pos = None,
|
||||
cname = 'PyTypeObject')
|
||||
type_object.is_void = True
|
||||
type_object_type = type_object.type
|
||||
|
||||
self.declare_cfunction(
|
||||
'PyObject_TypeCheck',
|
||||
CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None),
|
||||
CFuncTypeArg("t", c_ptr_type(type_object_type), None)]),
|
||||
pos = None,
|
||||
defining = 1,
|
||||
cname = 'PyObject_TypeCheck')
|
||||
|
||||
def load_cythonscope(self):
|
||||
"""
|
||||
Creates some entries for testing purposes and entries for
|
||||
cython.array() and for cython.view.*.
|
||||
"""
|
||||
if self._cythonscope_initialized:
|
||||
return
|
||||
|
||||
self._cythonscope_initialized = True
|
||||
cython_testscope_utility_code.declare_in_scope(
|
||||
self, cython_scope=self)
|
||||
cython_test_extclass_utility_code.declare_in_scope(
|
||||
self, cython_scope=self)
|
||||
|
||||
#
|
||||
# The view sub-scope
|
||||
#
|
||||
self.viewscope = viewscope = ModuleScope(u'view', self, None)
|
||||
self.declare_module('view', viewscope, None).as_module = viewscope
|
||||
viewscope.is_cython_builtin = True
|
||||
viewscope.pxd_file_loaded = True
|
||||
|
||||
cythonview_testscope_utility_code.declare_in_scope(
|
||||
viewscope, cython_scope=self)
|
||||
|
||||
view_utility_scope = MemoryView.view_utility_code.declare_in_scope(
|
||||
self.viewscope, cython_scope=self,
|
||||
whitelist=MemoryView.view_utility_whitelist)
|
||||
|
||||
# self.entries["array"] = view_utility_scope.entries.pop("array")
|
||||
|
||||
|
||||
def create_cython_scope(context):
|
||||
# One could in fact probably make it a singleton,
|
||||
# but not sure yet whether any code mutates it (which would kill reusing
|
||||
# it across different contexts)
|
||||
return CythonScope(context)
|
||||
|
||||
# Load test utilities for the cython scope
|
||||
|
||||
def load_testscope_utility(cy_util_name, **kwargs):
|
||||
return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs)
|
||||
|
||||
|
||||
undecorated_methods_protos = UtilityCode(proto=u"""
|
||||
/* These methods are undecorated and have therefore no prototype */
|
||||
static PyObject *__pyx_TestClass_cdef_method(
|
||||
struct __pyx_TestClass_obj *self, int value);
|
||||
static PyObject *__pyx_TestClass_cpdef_method(
|
||||
struct __pyx_TestClass_obj *self, int value, int skip_dispatch);
|
||||
static PyObject *__pyx_TestClass_def_method(
|
||||
PyObject *self, PyObject *value);
|
||||
""")
|
||||
|
||||
cython_testscope_utility_code = load_testscope_utility("TestScope")
|
||||
|
||||
test_cython_utility_dep = load_testscope_utility("TestDep")
|
||||
|
||||
cython_test_extclass_utility_code = \
|
||||
load_testscope_utility("TestClass", name="TestClass",
|
||||
requires=[undecorated_methods_protos,
|
||||
test_cython_utility_dep])
|
||||
|
||||
cythonview_testscope_utility_code = load_testscope_utility("View.TestScope")
|
@ -0,0 +1,21 @@
|
||||
# Can be enabled at the command line with --debug-xxx.
|
||||
|
||||
debug_disposal_code = 0
|
||||
debug_temp_alloc = 0
|
||||
debug_coercion = 0
|
||||
|
||||
# Write comments into the C code that show where temporary variables
|
||||
# are allocated and released.
|
||||
debug_temp_code_comments = 0
|
||||
|
||||
# Write a call trace of the code generation phase into the C code.
|
||||
debug_trace_code_generation = 0
|
||||
|
||||
# Do not replace exceptions with user-friendly error messages.
|
||||
debug_no_exception_intercept = 0
|
||||
|
||||
# Print a message each time a new stage in the pipeline is entered.
|
||||
debug_verbose_pipeline = 0
|
||||
|
||||
# Raise an exception when an error is encountered.
|
||||
debug_exception_on_error = 0
|
265
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Errors.py
Normal file
265
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Errors.py
Normal file
@ -0,0 +1,265 @@
|
||||
#
|
||||
# Errors
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
try:
|
||||
from __builtin__ import basestring as any_string_type
|
||||
except ImportError:
|
||||
any_string_type = (bytes, str)
|
||||
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
|
||||
from ..Utils import open_new_file
|
||||
from . import DebugFlags
|
||||
from . import Options
|
||||
|
||||
|
||||
class PyrexError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class PyrexWarning(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def context(position):
|
||||
source = position[0]
|
||||
assert not (isinstance(source, any_string_type)), (
|
||||
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
|
||||
try:
|
||||
F = source.get_lines()
|
||||
except UnicodeDecodeError:
|
||||
# file has an encoding problem
|
||||
s = u"[unprintable code]\n"
|
||||
else:
|
||||
s = u''.join(F[max(0, position[1]-6):position[1]])
|
||||
s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1))
|
||||
s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60)
|
||||
return s
|
||||
|
||||
def format_position(position):
|
||||
if position:
|
||||
return u"%s:%d:%d: " % (position[0].get_error_description(),
|
||||
position[1], position[2])
|
||||
return u''
|
||||
|
||||
def format_error(message, position):
|
||||
if position:
|
||||
pos_str = format_position(position)
|
||||
cont = context(position)
|
||||
message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'')
|
||||
return message
|
||||
|
||||
class CompileError(PyrexError):
|
||||
|
||||
def __init__(self, position = None, message = u""):
|
||||
self.position = position
|
||||
self.message_only = message
|
||||
self.formatted_message = format_error(message, position)
|
||||
self.reported = False
|
||||
# Deprecated and withdrawn in 2.6:
|
||||
# self.message = message
|
||||
Exception.__init__(self, self.formatted_message)
|
||||
# Python Exception subclass pickling is broken,
|
||||
# see http://bugs.python.org/issue1692335
|
||||
self.args = (position, message)
|
||||
|
||||
def __str__(self):
|
||||
return self.formatted_message
|
||||
|
||||
class CompileWarning(PyrexWarning):
|
||||
|
||||
def __init__(self, position = None, message = ""):
|
||||
self.position = position
|
||||
# Deprecated and withdrawn in 2.6:
|
||||
# self.message = message
|
||||
Exception.__init__(self, format_position(position) + message)
|
||||
|
||||
class InternalError(Exception):
|
||||
# If this is ever raised, there is a bug in the compiler.
|
||||
|
||||
def __init__(self, message):
|
||||
self.message_only = message
|
||||
Exception.__init__(self, u"Internal compiler error: %s"
|
||||
% message)
|
||||
|
||||
class AbortError(Exception):
|
||||
# Throw this to stop the compilation immediately.
|
||||
|
||||
def __init__(self, message):
|
||||
self.message_only = message
|
||||
Exception.__init__(self, u"Abort error: %s" % message)
|
||||
|
||||
class CompilerCrash(CompileError):
|
||||
# raised when an unexpected exception occurs in a transform
|
||||
def __init__(self, pos, context, message, cause, stacktrace=None):
|
||||
if message:
|
||||
message = u'\n' + message
|
||||
else:
|
||||
message = u'\n'
|
||||
self.message_only = message
|
||||
if context:
|
||||
message = u"Compiler crash in %s%s" % (context, message)
|
||||
if stacktrace:
|
||||
import traceback
|
||||
message += (
|
||||
u'\n\nCompiler crash traceback from this point on:\n' +
|
||||
u''.join(traceback.format_tb(stacktrace)))
|
||||
if cause:
|
||||
if not stacktrace:
|
||||
message += u'\n'
|
||||
message += u'%s: %s' % (cause.__class__.__name__, cause)
|
||||
CompileError.__init__(self, pos, message)
|
||||
# Python Exception subclass pickling is broken,
|
||||
# see http://bugs.python.org/issue1692335
|
||||
self.args = (pos, context, message, cause, stacktrace)
|
||||
|
||||
class NoElementTreeInstalledException(PyrexError):
|
||||
"""raised when the user enabled options.gdb_debug but no ElementTree
|
||||
implementation was found
|
||||
"""
|
||||
|
||||
listing_file = None
|
||||
num_errors = 0
|
||||
echo_file = None
|
||||
|
||||
def open_listing_file(path, echo_to_stderr = 1):
|
||||
# Begin a new error listing. If path is None, no file
|
||||
# is opened, the error counter is just reset.
|
||||
global listing_file, num_errors, echo_file
|
||||
if path is not None:
|
||||
listing_file = open_new_file(path)
|
||||
else:
|
||||
listing_file = None
|
||||
if echo_to_stderr:
|
||||
echo_file = sys.stderr
|
||||
else:
|
||||
echo_file = None
|
||||
num_errors = 0
|
||||
|
||||
def close_listing_file():
|
||||
global listing_file
|
||||
if listing_file:
|
||||
listing_file.close()
|
||||
listing_file = None
|
||||
|
||||
def report_error(err, use_stack=True):
|
||||
if error_stack and use_stack:
|
||||
error_stack[-1].append(err)
|
||||
else:
|
||||
global num_errors
|
||||
# See Main.py for why dual reporting occurs. Quick fix for now.
|
||||
if err.reported: return
|
||||
err.reported = True
|
||||
try: line = u"%s\n" % err
|
||||
except UnicodeEncodeError:
|
||||
# Python <= 2.5 does this for non-ASCII Unicode exceptions
|
||||
line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
|
||||
getattr(err, 'position', None)) + u'\n'
|
||||
if listing_file:
|
||||
try: listing_file.write(line)
|
||||
except UnicodeEncodeError:
|
||||
listing_file.write(line.encode('ASCII', 'replace'))
|
||||
if echo_file:
|
||||
try: echo_file.write(line)
|
||||
except UnicodeEncodeError:
|
||||
echo_file.write(line.encode('ASCII', 'replace'))
|
||||
num_errors += 1
|
||||
if Options.fast_fail:
|
||||
raise AbortError("fatal errors")
|
||||
|
||||
|
||||
def error(position, message):
|
||||
#print("Errors.error:", repr(position), repr(message)) ###
|
||||
if position is None:
|
||||
raise InternalError(message)
|
||||
err = CompileError(position, message)
|
||||
if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
|
||||
report_error(err)
|
||||
return err
|
||||
|
||||
|
||||
LEVEL = 1 # warn about all errors level 1 or higher
|
||||
|
||||
|
||||
def message(position, message, level=1):
|
||||
if level < LEVEL:
|
||||
return
|
||||
warn = CompileWarning(position, message)
|
||||
line = "note: %s\n" % warn
|
||||
if listing_file:
|
||||
listing_file.write(line)
|
||||
if echo_file:
|
||||
echo_file.write(line)
|
||||
return warn
|
||||
|
||||
|
||||
def warning(position, message, level=0):
|
||||
if level < LEVEL:
|
||||
return
|
||||
if Options.warning_errors and position:
|
||||
return error(position, message)
|
||||
warn = CompileWarning(position, message)
|
||||
line = "warning: %s\n" % warn
|
||||
if listing_file:
|
||||
listing_file.write(line)
|
||||
if echo_file:
|
||||
echo_file.write(line)
|
||||
return warn
|
||||
|
||||
|
||||
_warn_once_seen = {}
|
||||
def warn_once(position, message, level=0):
|
||||
if level < LEVEL or message in _warn_once_seen:
|
||||
return
|
||||
warn = CompileWarning(position, message)
|
||||
line = "warning: %s\n" % warn
|
||||
if listing_file:
|
||||
listing_file.write(line)
|
||||
if echo_file:
|
||||
echo_file.write(line)
|
||||
_warn_once_seen[message] = True
|
||||
return warn
|
||||
|
||||
|
||||
# These functions can be used to momentarily suppress errors.
|
||||
|
||||
error_stack = []
|
||||
|
||||
|
||||
def hold_errors():
|
||||
error_stack.append([])
|
||||
|
||||
|
||||
def release_errors(ignore=False):
|
||||
held_errors = error_stack.pop()
|
||||
if not ignore:
|
||||
for err in held_errors:
|
||||
report_error(err)
|
||||
|
||||
|
||||
def held_errors():
|
||||
return error_stack[-1]
|
||||
|
||||
|
||||
# same as context manager:
|
||||
|
||||
@contextmanager
|
||||
def local_errors(ignore=False):
|
||||
errors = []
|
||||
error_stack.append(errors)
|
||||
try:
|
||||
yield errors
|
||||
finally:
|
||||
release_errors(ignore=ignore)
|
||||
|
||||
|
||||
# this module needs a redesign to support parallel cythonisation, but
|
||||
# for now, the following works at least in sequential compiler runs
|
||||
|
||||
def reset():
|
||||
_warn_once_seen.clear()
|
||||
del error_stack[:]
|
13717
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/ExprNodes.py
Normal file
13717
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/ExprNodes.py
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -0,0 +1,111 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
cimport cython
|
||||
|
||||
from .Visitor cimport CythonTransform, TreeVisitor
|
||||
|
||||
cdef class ControlBlock:
|
||||
cdef public set children
|
||||
cdef public set parents
|
||||
cdef public set positions
|
||||
cdef public list stats
|
||||
cdef public dict gen
|
||||
cdef public set bounded
|
||||
|
||||
# Big integer bitsets
|
||||
cdef public object i_input
|
||||
cdef public object i_output
|
||||
cdef public object i_gen
|
||||
cdef public object i_kill
|
||||
cdef public object i_state
|
||||
|
||||
cpdef bint empty(self)
|
||||
cpdef detach(self)
|
||||
cpdef add_child(self, block)
|
||||
|
||||
cdef class ExitBlock(ControlBlock):
|
||||
cpdef bint empty(self)
|
||||
|
||||
cdef class NameAssignment:
|
||||
cdef public bint is_arg
|
||||
cdef public bint is_deletion
|
||||
cdef public object lhs
|
||||
cdef public object rhs
|
||||
cdef public object entry
|
||||
cdef public object pos
|
||||
cdef public set refs
|
||||
cdef public object bit
|
||||
cdef public object inferred_type
|
||||
|
||||
cdef class AssignmentList:
|
||||
cdef public object bit
|
||||
cdef public object mask
|
||||
cdef public list stats
|
||||
|
||||
cdef class AssignmentCollector(TreeVisitor):
|
||||
cdef list assignments
|
||||
|
||||
@cython.final
|
||||
cdef class ControlFlow:
|
||||
cdef public set blocks
|
||||
cdef public set entries
|
||||
cdef public list loops
|
||||
cdef public list exceptions
|
||||
|
||||
cdef public ControlBlock entry_point
|
||||
cdef public ExitBlock exit_point
|
||||
cdef public ControlBlock block
|
||||
|
||||
cdef public dict assmts
|
||||
|
||||
cpdef newblock(self, ControlBlock parent=*)
|
||||
cpdef nextblock(self, ControlBlock parent=*)
|
||||
cpdef bint is_tracked(self, entry)
|
||||
cpdef bint is_statically_assigned(self, entry)
|
||||
cpdef mark_position(self, node)
|
||||
cpdef mark_assignment(self, lhs, rhs, entry)
|
||||
cpdef mark_argument(self, lhs, rhs, entry)
|
||||
cpdef mark_deletion(self, node, entry)
|
||||
cpdef mark_reference(self, node, entry)
|
||||
|
||||
@cython.locals(block=ControlBlock, parent=ControlBlock, unreachable=set)
|
||||
cpdef normalize(self)
|
||||
|
||||
@cython.locals(bit=object, assmts=AssignmentList,
|
||||
block=ControlBlock)
|
||||
cpdef initialize(self)
|
||||
|
||||
@cython.locals(assmts=AssignmentList, assmt=NameAssignment)
|
||||
cpdef set map_one(self, istate, entry)
|
||||
|
||||
@cython.locals(block=ControlBlock, parent=ControlBlock)
|
||||
cdef reaching_definitions(self)
|
||||
|
||||
cdef class Uninitialized:
|
||||
pass
|
||||
|
||||
cdef class Unknown:
|
||||
pass
|
||||
|
||||
|
||||
cdef class MessageCollection:
|
||||
cdef set messages
|
||||
|
||||
|
||||
@cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock,
|
||||
assmt=NameAssignment)
|
||||
cdef check_definitions(ControlFlow flow, dict compiler_directives)
|
||||
|
||||
@cython.final
|
||||
cdef class ControlFlowAnalysis(CythonTransform):
|
||||
cdef object gv_ctx
|
||||
cdef object constant_folder
|
||||
cdef set reductions
|
||||
cdef list env_stack
|
||||
cdef list stack
|
||||
cdef object env
|
||||
cdef ControlFlow flow
|
||||
cdef bint in_inplace_assignment
|
||||
|
||||
cpdef mark_assignment(self, lhs, rhs=*)
|
||||
cpdef mark_position(self, node)
|
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -0,0 +1,901 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import copy
|
||||
|
||||
from . import (ExprNodes, PyrexTypes, MemoryView,
|
||||
ParseTreeTransforms, StringEncoding, Errors)
|
||||
from .ExprNodes import CloneNode, ProxyNode, TupleNode
|
||||
from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode
|
||||
from ..Utils import OrderedSet
|
||||
|
||||
|
||||
class FusedCFuncDefNode(StatListNode):
|
||||
"""
|
||||
This node replaces a function with fused arguments. It deep-copies the
|
||||
function for every permutation of fused types, and allocates a new local
|
||||
scope for it. It keeps track of the original function in self.node, and
|
||||
the entry of the original function in the symbol table is given the
|
||||
'fused_cfunction' attribute which points back to us.
|
||||
Then when a function lookup occurs (to e.g. call it), the call can be
|
||||
dispatched to the right function.
|
||||
|
||||
node FuncDefNode the original function
|
||||
nodes [FuncDefNode] list of copies of node with different specific types
|
||||
py_func DefNode the fused python function subscriptable from
|
||||
Python space
|
||||
__signatures__ A DictNode mapping signature specialization strings
|
||||
to PyCFunction nodes
|
||||
resulting_fused_function PyCFunction for the fused DefNode that delegates
|
||||
to specializations
|
||||
fused_func_assignment Assignment of the fused function to the function name
|
||||
defaults_tuple TupleNode of defaults (letting PyCFunctionNode build
|
||||
defaults would result in many different tuples)
|
||||
specialized_pycfuncs List of synthesized pycfunction nodes for the
|
||||
specializations
|
||||
code_object CodeObjectNode shared by all specializations and the
|
||||
fused function
|
||||
|
||||
fused_compound_types All fused (compound) types (e.g. floating[:])
|
||||
"""
|
||||
|
||||
__signatures__ = None
|
||||
resulting_fused_function = None
|
||||
fused_func_assignment = None
|
||||
defaults_tuple = None
|
||||
decorators = None
|
||||
|
||||
child_attrs = StatListNode.child_attrs + [
|
||||
'__signatures__', 'resulting_fused_function', 'fused_func_assignment']
|
||||
|
||||
def __init__(self, node, env):
|
||||
super(FusedCFuncDefNode, self).__init__(node.pos)
|
||||
|
||||
self.nodes = []
|
||||
self.node = node
|
||||
|
||||
is_def = isinstance(self.node, DefNode)
|
||||
if is_def:
|
||||
# self.node.decorators = []
|
||||
self.copy_def(env)
|
||||
else:
|
||||
self.copy_cdef(env)
|
||||
|
||||
# Perform some sanity checks. If anything fails, it's a bug
|
||||
for n in self.nodes:
|
||||
assert not n.entry.type.is_fused
|
||||
assert not n.local_scope.return_type.is_fused
|
||||
if node.return_type.is_fused:
|
||||
assert not n.return_type.is_fused
|
||||
|
||||
if not is_def and n.cfunc_declarator.optional_arg_count:
|
||||
assert n.type.op_arg_struct
|
||||
|
||||
node.entry.fused_cfunction = self
|
||||
# Copy the nodes as AnalyseDeclarationsTransform will prepend
|
||||
# self.py_func to self.stats, as we only want specialized
|
||||
# CFuncDefNodes in self.nodes
|
||||
self.stats = self.nodes[:]
|
||||
|
||||
def copy_def(self, env):
|
||||
"""
|
||||
Create a copy of the original def or lambda function for specialized
|
||||
versions.
|
||||
"""
|
||||
fused_compound_types = PyrexTypes.unique(
|
||||
[arg.type for arg in self.node.args if arg.type.is_fused])
|
||||
fused_types = self._get_fused_base_types(fused_compound_types)
|
||||
permutations = PyrexTypes.get_all_specialized_permutations(fused_types)
|
||||
|
||||
self.fused_compound_types = fused_compound_types
|
||||
|
||||
if self.node.entry in env.pyfunc_entries:
|
||||
env.pyfunc_entries.remove(self.node.entry)
|
||||
|
||||
for cname, fused_to_specific in permutations:
|
||||
copied_node = copy.deepcopy(self.node)
|
||||
# keep signature object identity for special casing in DefNode.analyse_declarations()
|
||||
copied_node.entry.signature = self.node.entry.signature
|
||||
|
||||
self._specialize_function_args(copied_node.args, fused_to_specific)
|
||||
copied_node.return_type = self.node.return_type.specialize(
|
||||
fused_to_specific)
|
||||
|
||||
copied_node.analyse_declarations(env)
|
||||
# copied_node.is_staticmethod = self.node.is_staticmethod
|
||||
# copied_node.is_classmethod = self.node.is_classmethod
|
||||
self.create_new_local_scope(copied_node, env, fused_to_specific)
|
||||
self.specialize_copied_def(copied_node, cname, self.node.entry,
|
||||
fused_to_specific, fused_compound_types)
|
||||
|
||||
PyrexTypes.specialize_entry(copied_node.entry, cname)
|
||||
copied_node.entry.used = True
|
||||
env.entries[copied_node.entry.name] = copied_node.entry
|
||||
|
||||
if not self.replace_fused_typechecks(copied_node):
|
||||
break
|
||||
|
||||
self.orig_py_func = self.node
|
||||
self.py_func = self.make_fused_cpdef(self.node, env, is_def=True)
|
||||
|
||||
def copy_cdef(self, env):
|
||||
"""
|
||||
Create a copy of the original c(p)def function for all specialized
|
||||
versions.
|
||||
"""
|
||||
permutations = self.node.type.get_all_specialized_permutations()
|
||||
# print 'Node %s has %d specializations:' % (self.node.entry.name,
|
||||
# len(permutations))
|
||||
# import pprint; pprint.pprint([d for cname, d in permutations])
|
||||
|
||||
# Prevent copying of the python function
|
||||
self.orig_py_func = orig_py_func = self.node.py_func
|
||||
self.node.py_func = None
|
||||
if orig_py_func:
|
||||
env.pyfunc_entries.remove(orig_py_func.entry)
|
||||
|
||||
fused_types = self.node.type.get_fused_types()
|
||||
self.fused_compound_types = fused_types
|
||||
|
||||
new_cfunc_entries = []
|
||||
for cname, fused_to_specific in permutations:
|
||||
copied_node = copy.deepcopy(self.node)
|
||||
|
||||
# Make the types in our CFuncType specific.
|
||||
type = copied_node.type.specialize(fused_to_specific)
|
||||
entry = copied_node.entry
|
||||
type.specialize_entry(entry, cname)
|
||||
|
||||
# Reuse existing Entries (e.g. from .pxd files).
|
||||
for i, orig_entry in enumerate(env.cfunc_entries):
|
||||
if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
|
||||
copied_node.entry = env.cfunc_entries[i]
|
||||
if not copied_node.entry.func_cname:
|
||||
copied_node.entry.func_cname = entry.func_cname
|
||||
entry = copied_node.entry
|
||||
type = entry.type
|
||||
break
|
||||
else:
|
||||
new_cfunc_entries.append(entry)
|
||||
|
||||
copied_node.type = type
|
||||
entry.type, type.entry = type, entry
|
||||
|
||||
entry.used = (entry.used or
|
||||
self.node.entry.defined_in_pxd or
|
||||
env.is_c_class_scope or
|
||||
entry.is_cmethod)
|
||||
|
||||
if self.node.cfunc_declarator.optional_arg_count:
|
||||
self.node.cfunc_declarator.declare_optional_arg_struct(
|
||||
type, env, fused_cname=cname)
|
||||
|
||||
copied_node.return_type = type.return_type
|
||||
self.create_new_local_scope(copied_node, env, fused_to_specific)
|
||||
|
||||
# Make the argument types in the CFuncDeclarator specific
|
||||
self._specialize_function_args(copied_node.cfunc_declarator.args,
|
||||
fused_to_specific)
|
||||
|
||||
# If a cpdef, declare all specialized cpdefs (this
|
||||
# also calls analyse_declarations)
|
||||
copied_node.declare_cpdef_wrapper(env)
|
||||
if copied_node.py_func:
|
||||
env.pyfunc_entries.remove(copied_node.py_func.entry)
|
||||
|
||||
self.specialize_copied_def(
|
||||
copied_node.py_func, cname, self.node.entry.as_variable,
|
||||
fused_to_specific, fused_types)
|
||||
|
||||
if not self.replace_fused_typechecks(copied_node):
|
||||
break
|
||||
|
||||
# replace old entry with new entries
|
||||
try:
|
||||
cindex = env.cfunc_entries.index(self.node.entry)
|
||||
except ValueError:
|
||||
env.cfunc_entries.extend(new_cfunc_entries)
|
||||
else:
|
||||
env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries
|
||||
|
||||
if orig_py_func:
|
||||
self.py_func = self.make_fused_cpdef(orig_py_func, env,
|
||||
is_def=False)
|
||||
else:
|
||||
self.py_func = orig_py_func
|
||||
|
||||
def _get_fused_base_types(self, fused_compound_types):
|
||||
"""
|
||||
Get a list of unique basic fused types, from a list of
|
||||
(possibly) compound fused types.
|
||||
"""
|
||||
base_types = []
|
||||
seen = set()
|
||||
for fused_type in fused_compound_types:
|
||||
fused_type.get_fused_types(result=base_types, seen=seen)
|
||||
return base_types
|
||||
|
||||
def _specialize_function_args(self, args, fused_to_specific):
|
||||
for arg in args:
|
||||
if arg.type.is_fused:
|
||||
arg.type = arg.type.specialize(fused_to_specific)
|
||||
if arg.type.is_memoryviewslice:
|
||||
arg.type.validate_memslice_dtype(arg.pos)
|
||||
|
||||
def create_new_local_scope(self, node, env, f2s):
|
||||
"""
|
||||
Create a new local scope for the copied node and append it to
|
||||
self.nodes. A new local scope is needed because the arguments with the
|
||||
fused types are already in the local scope, and we need the specialized
|
||||
entries created after analyse_declarations on each specialized version
|
||||
of the (CFunc)DefNode.
|
||||
f2s is a dict mapping each fused type to its specialized version
|
||||
"""
|
||||
node.create_local_scope(env)
|
||||
node.local_scope.fused_to_specific = f2s
|
||||
|
||||
# This is copied from the original function, set it to false to
|
||||
# stop recursion
|
||||
node.has_fused_arguments = False
|
||||
self.nodes.append(node)
|
||||
|
||||
def specialize_copied_def(self, node, cname, py_entry, f2s, fused_compound_types):
|
||||
"""Specialize the copy of a DefNode given the copied node,
|
||||
the specialization cname and the original DefNode entry"""
|
||||
fused_types = self._get_fused_base_types(fused_compound_types)
|
||||
type_strings = [
|
||||
PyrexTypes.specialization_signature_string(fused_type, f2s)
|
||||
for fused_type in fused_types
|
||||
]
|
||||
|
||||
node.specialized_signature_string = '|'.join(type_strings)
|
||||
|
||||
node.entry.pymethdef_cname = PyrexTypes.get_fused_cname(
|
||||
cname, node.entry.pymethdef_cname)
|
||||
node.entry.doc = py_entry.doc
|
||||
node.entry.doc_cname = py_entry.doc_cname
|
||||
|
||||
def replace_fused_typechecks(self, copied_node):
|
||||
"""
|
||||
Branch-prune fused type checks like
|
||||
|
||||
if fused_t is int:
|
||||
...
|
||||
|
||||
Returns whether an error was issued and whether we should stop in
|
||||
in order to prevent a flood of errors.
|
||||
"""
|
||||
num_errors = Errors.num_errors
|
||||
transform = ParseTreeTransforms.ReplaceFusedTypeChecks(
|
||||
copied_node.local_scope)
|
||||
transform(copied_node)
|
||||
|
||||
if Errors.num_errors > num_errors:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _fused_instance_checks(self, normal_types, pyx_code, env):
|
||||
"""
|
||||
Generate Cython code for instance checks, matching an object to
|
||||
specialized types.
|
||||
"""
|
||||
for specialized_type in normal_types:
|
||||
# all_numeric = all_numeric and specialized_type.is_numeric
|
||||
pyx_code.context.update(
|
||||
py_type_name=specialized_type.py_type_name(),
|
||||
specialized_type_name=specialized_type.specialization_string,
|
||||
)
|
||||
pyx_code.put_chunk(
|
||||
u"""
|
||||
if isinstance(arg, {{py_type_name}}):
|
||||
dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'; break
|
||||
""")
|
||||
|
||||
def _dtype_name(self, dtype):
|
||||
if dtype.is_typedef:
|
||||
return '___pyx_%s' % dtype
|
||||
return str(dtype).replace(' ', '_')
|
||||
|
||||
def _dtype_type(self, dtype):
|
||||
if dtype.is_typedef:
|
||||
return self._dtype_name(dtype)
|
||||
return str(dtype)
|
||||
|
||||
def _sizeof_dtype(self, dtype):
|
||||
if dtype.is_pyobject:
|
||||
return 'sizeof(void *)'
|
||||
else:
|
||||
return "sizeof(%s)" % self._dtype_type(dtype)
|
||||
|
||||
def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
|
||||
"Setup some common cases to match dtypes against specializations"
|
||||
if pyx_code.indenter("if kind in b'iu':"):
|
||||
pyx_code.putln("pass")
|
||||
pyx_code.named_insertion_point("dtype_int")
|
||||
pyx_code.dedent()
|
||||
|
||||
if pyx_code.indenter("elif kind == b'f':"):
|
||||
pyx_code.putln("pass")
|
||||
pyx_code.named_insertion_point("dtype_float")
|
||||
pyx_code.dedent()
|
||||
|
||||
if pyx_code.indenter("elif kind == b'c':"):
|
||||
pyx_code.putln("pass")
|
||||
pyx_code.named_insertion_point("dtype_complex")
|
||||
pyx_code.dedent()
|
||||
|
||||
if pyx_code.indenter("elif kind == b'O':"):
|
||||
pyx_code.putln("pass")
|
||||
pyx_code.named_insertion_point("dtype_object")
|
||||
pyx_code.dedent()
|
||||
|
||||
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
|
||||
no_match = "dest_sig[{{dest_sig_idx}}] = None"
|
||||
def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types):
    """
    Match a numpy dtype object to the individual specializations.

    Emits, into the per-kind insertion points prepared by
    _buffer_check_numpy_dtype_setup_cases, an `if` test per specialization
    comparing itemsize, ndim and (for ints) signedness against the runtime
    dtype; a hit writes self.match and breaks out of the dispatch loop.
    """
    self._buffer_check_numpy_dtype_setup_cases(pyx_code)

    for specialized_type in pythran_types+specialized_buffer_types:
        final_type = specialized_type
        if specialized_type.is_pythran_expr:
            # Pythran expression types carry the original buffer type they
            # were derived from; match on that underlying buffer.
            specialized_type = specialized_type.org_buffer
        dtype = specialized_type.dtype
        pyx_code.context.update(
            itemsize_match=self._sizeof_dtype(dtype) + " == itemsize",
            # XOR: signedness matches when both or neither are signed.
            signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype),
            dtype=dtype,
            specialized_type_name=final_type.specialization_string)

        # Route the generated check to the code writer for this dtype's
        # kind-branch (int / float / complex).
        dtypes = [
            (dtype.is_int, pyx_code.dtype_int),
            (dtype.is_float, pyx_code.dtype_float),
            (dtype.is_complex, pyx_code.dtype_complex)
        ]

        for dtype_category, codewriter in dtypes:
            if dtype_category:
                cond = '{{itemsize_match}} and (<Py_ssize_t>arg.ndim) == %d' % (
                    specialized_type.ndim,)
                if dtype.is_int:
                    cond += ' and {{signed_match}}'

                if final_type.is_pythran_expr:
                    # Pythran specializations additionally require the
                    # compatibility flag computed in _buffer_checks.
                    cond += ' and arg_is_pythran_compatible'

                if codewriter.indenter("if %s:" % cond):
                    #codewriter.putln("print 'buffer match found based on numpy dtype'")
                    codewriter.putln(self.match)
                    codewriter.putln("break")
                    codewriter.dedent()
|
||||
|
||||
def _buffer_parse_format_string_check(self, pyx_code, decl_code,
                                      specialized_type, env):
    """
    For each specialized type, try to coerce the object to a memoryview
    slice of that type. This means obtaining a buffer and parsing the
    format string.

    This is the slow fallback path used when matching on the numpy dtype
    (fast path) did not succeed.

    TODO: separate buffer acquisition from format parsing
    """
    dtype = specialized_type.dtype
    if specialized_type.is_buffer:
        # Plain buffer types are treated as fully strided memoryviews.
        axes = [('direct', 'strided')] * specialized_type.ndim
    else:
        axes = specialized_type.axes

    memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes)
    # Ensure the object -> memoryview-slice conversion utility exists.
    memslice_type.create_from_py_utility_code(env)
    pyx_code.context.update(
        coerce_from_py_func=memslice_type.from_py_function,
        dtype=dtype)
    # Forward-declare the coercion function for the generated module.
    decl_code.putln(
        "{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)")

    pyx_code.context.update(
        specialized_type_name=specialized_type.specialization_string,
        sizeof_dtype=self._sizeof_dtype(dtype))

    # itemsize == -1 means no dtype information was available, so the
    # coercion attempt is the only way to test this specialization.
    pyx_code.put_chunk(
        u"""
            # try {{dtype}}
            if itemsize == -1 or itemsize == {{sizeof_dtype}}:
                memslice = {{coerce_from_py_func}}(arg, 0)
                if memslice.memview:
                    __PYX_XDEC_MEMVIEW(&memslice, 1)
                    # print 'found a match for the buffer through format parsing'
                    %s
                    break
                else:
                    __pyx_PyErr_Clear()
        """ % self.match)
|
||||
|
||||
def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, env):
    """
    Generate Cython code to match objects to buffer specializations.

    First try to get a numpy dtype object and match it against the individual
    specializations. If that fails, try naively to coerce the object
    to each specialization, which obtains the buffer each time and tries
    to match the format string.
    """
    # The first thing to find a match in this loop breaks out of the loop
    pyx_code.put_chunk(
        u"""
            """ + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u"""
            if ndarray is not None:
                if isinstance(arg, ndarray):
                    dtype = arg.dtype
                    """ + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u"""
                elif __pyx_memoryview_check(arg):
                    arg_base = arg.base
                    if isinstance(arg_base, ndarray):
                        dtype = arg_base.dtype
                    else:
                        dtype = None
                else:
                    dtype = None

                itemsize = -1
                if dtype is not None:
                    itemsize = dtype.itemsize
                    kind = ord(dtype.kind)
                    dtype_signed = kind == 'i'
        """)
    # Following writes must land inside "if ndarray.../if dtype is not None".
    pyx_code.indent(2)
    if pythran_types:
        # Pythran can only consume C-contiguous, native-endian arrays;
        # verify strides form a dense row-major layout.
        pyx_code.put_chunk(
            u"""
                # Pythran only supports the endianness of the current compiler
                byteorder = dtype.byteorder
                if byteorder == "<" and not __Pyx_Is_Little_Endian():
                    arg_is_pythran_compatible = False
                elif byteorder == ">" and __Pyx_Is_Little_Endian():
                    arg_is_pythran_compatible = False
                if arg_is_pythran_compatible:
                    cur_stride = itemsize
                    shape = arg.shape
                    strides = arg.strides
                    for i in range(arg.ndim-1, -1, -1):
                        if (<Py_ssize_t>strides[i]) != cur_stride:
                            arg_is_pythran_compatible = False
                            break
                        cur_stride *= <Py_ssize_t> shape[i]
                    else:
                        arg_is_pythran_compatible = not (arg.flags.f_contiguous and (<Py_ssize_t>arg.ndim) > 1)
            """)
    pyx_code.named_insertion_point("numpy_dtype_checks")
    self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types)
    pyx_code.dedent(2)

    # Slow fallback: attempt actual buffer acquisition per specialization.
    for specialized_type in buffer_types:
        self._buffer_parse_format_string_check(
            pyx_code, decl_code, specialized_type, env)
|
||||
|
||||
def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types):
    """
    If we have any buffer specializations, write out some variable
    declarations and imports.

    Emits the memoryview-slice struct/helpers into decl_code, the local
    cdef variables into the dispatcher's declaration insertion point, and
    a per-int-dtype runtime signedness probe.
    """
    decl_code.put_chunk(
        u"""
            ctypedef struct {{memviewslice_cname}}:
                void *memview

            void __PYX_XDEC_MEMVIEW({{memviewslice_cname}} *, int have_gil)
            bint __pyx_memoryview_check(object)
        """)

    pyx_code.local_variable_declarations.put_chunk(
        u"""
            cdef {{memviewslice_cname}} memslice
            cdef Py_ssize_t itemsize
            cdef bint dtype_signed
            cdef char kind

            itemsize = -1
        """)

    if pythran_types:
        pyx_code.local_variable_declarations.put_chunk(u"""
            cdef bint arg_is_pythran_compatible
            cdef Py_ssize_t cur_stride
        """)

    # ndarray is None when numpy is unavailable; the generated checks
    # test for that before touching dtype attributes.
    pyx_code.imports.put_chunk(
        u"""
            cdef type ndarray
            ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable()
        """)

    seen_typedefs = set()
    seen_int_dtypes = set()
    for buffer_type in all_buffer_types:
        dtype = buffer_type.dtype
        dtype_name = self._dtype_name(dtype)
        if dtype.is_typedef:
            # Re-declare the typedef once so its name is usable in the
            # generated declaration code.
            if dtype_name not in seen_typedefs:
                seen_typedefs.add(dtype_name)
                decl_code.putln(
                    'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name,
                                             dtype.empty_declaration_code()))

        if buffer_type.dtype.is_int:
            # Probe signedness at runtime once per distinct int dtype:
            # casting -1 to the type yields > 0 only for unsigned types.
            if str(dtype) not in seen_int_dtypes:
                seen_int_dtypes.add(str(dtype))
                pyx_code.context.update(dtype_name=dtype_name,
                                        dtype_type=self._dtype_type(dtype))
                pyx_code.local_variable_declarations.put_chunk(
                    u"""
                        cdef bint {{dtype_name}}_is_signed
                        {{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)
                    """)
|
||||
|
||||
def _split_fused_types(self, arg):
    """
    Specialize the fused type of *arg* and partition the specializations.

    Returns a 4-tuple ``(normal_types, buffer_types, pythran_types,
    has_object_fallback)`` where normal types are matched by Python-level
    isinstance checks, buffer/pythran types by buffer dispatch, and the
    flag records whether a plain ``object`` fallback specialization exists.
    """
    candidates = PyrexTypes.get_specialized_types(arg.type)
    # Prefer long over int, etc by sorting (see type classes in PyrexTypes.py)
    candidates.sort()

    normal_types = []
    buffer_types = []
    pythran_types = []
    has_object_fallback = False
    seen_py_type_names = set()

    for candidate in candidates:
        py_type_name = candidate.py_type_name()
        if py_type_name:
            # Several C types may map onto one Python type; only the first
            # (best, thanks to the sort above) representative is kept.
            if py_type_name in seen_py_type_names:
                continue
            seen_py_type_names.add(py_type_name)
            if py_type_name == 'object':
                has_object_fallback = True
            else:
                normal_types.append(candidate)
        elif candidate.is_pythran_expr:
            pythran_types.append(candidate)
        elif candidate.is_buffer or candidate.is_memoryviewslice:
            buffer_types.append(candidate)

    return normal_types, buffer_types, pythran_types, has_object_fallback
|
||||
|
||||
def _unpack_argument(self, pyx_code):
|
||||
pyx_code.put_chunk(
|
||||
u"""
|
||||
# PROCESSING ARGUMENT {{arg_tuple_idx}}
|
||||
if {{arg_tuple_idx}} < len(<tuple>args):
|
||||
arg = (<tuple>args)[{{arg_tuple_idx}}]
|
||||
elif kwargs is not None and '{{arg.name}}' in <dict>kwargs:
|
||||
arg = (<dict>kwargs)['{{arg.name}}']
|
||||
else:
|
||||
{{if arg.default}}
|
||||
arg = (<tuple>defaults)[{{default_idx}}]
|
||||
{{else}}
|
||||
{{if arg_tuple_idx < min_positional_args}}
|
||||
raise TypeError("Expected at least %d argument%s, got %d" % (
|
||||
{{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(<tuple>args)))
|
||||
{{else}}
|
||||
raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}")
|
||||
{{endif}}
|
||||
{{endif}}
|
||||
""")
|
||||
|
||||
def make_fused_cpdef(self, orig_py_func, env, is_def):
    """
    This creates the function that is indexable from Python and does
    runtime dispatch based on the argument types. The function gets the
    arg tuple and kwargs dict (or None) and the defaults tuple
    as arguments from the Binding Fused Function's tp_call.

    The dispatcher source is assembled as Cython text in a PyxCodeWriter,
    parsed as a TreeFragment, and its DefNode returned.
    """
    from . import TreeFragment, Code, UtilityCode

    fused_types = self._get_fused_base_types([
        arg.type for arg in self.node.args if arg.type.is_fused])

    # Template context shared by pyx_code and decl_code below.
    context = {
        'memviewslice_cname': MemoryView.memviewslice_cname,
        'func_args': self.node.args,
        'n_fused': len(fused_types),
        'min_positional_args':
            self.node.num_required_args - self.node.num_required_kw_args
            if is_def else
            sum(1 for arg in self.node.args if arg.default is None),
        'name': orig_py_func.entry.name,
    }

    pyx_code = Code.PyxCodeWriter(context=context)
    decl_code = Code.PyxCodeWriter(context=context)
    decl_code.put_chunk(
        u"""
            cdef extern from *:
                void __pyx_PyErr_Clear "PyErr_Clear" ()
                type __Pyx_ImportNumPyArrayTypeIfAvailable()
                int __Pyx_Is_Little_Endian()
        """)
    decl_code.indent()

    pyx_code.put_chunk(
        u"""
            def __pyx_fused_cpdef(signatures, args, kwargs, defaults):
                # FIXME: use a typed signature - currently fails badly because
                #        default arguments inherit the types we specify here!

                dest_sig = [None] * {{n_fused}}

                if kwargs is not None and not kwargs:
                    kwargs = None

                cdef Py_ssize_t i

                # instance check body
        """)

    pyx_code.indent() # indent following code to function body
    pyx_code.named_insertion_point("imports")
    pyx_code.named_insertion_point("func_defs")
    pyx_code.named_insertion_point("local_variable_declarations")

    fused_index = 0
    default_idx = 0
    all_buffer_types = OrderedSet()
    seen_fused_types = set()
    for i, arg in enumerate(self.node.args):
        if arg.type.is_fused:
            arg_fused_types = arg.type.get_fused_types()
            if len(arg_fused_types) > 1:
                raise NotImplementedError("Determination of more than one fused base "
                                          "type per argument is not implemented.")
            fused_type = arg_fused_types[0]

        # Generate one type-check block per distinct fused base type;
        # further arguments of the same fused type reuse the same slot.
        if arg.type.is_fused and fused_type not in seen_fused_types:
            seen_fused_types.add(fused_type)

            context.update(
                arg_tuple_idx=i,
                arg=arg,
                dest_sig_idx=fused_index,
                default_idx=default_idx,
            )

            normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg)
            self._unpack_argument(pyx_code)

            # 'unrolled' loop, first match breaks out of it
            if pyx_code.indenter("while 1:"):
                if normal_types:
                    self._fused_instance_checks(normal_types, pyx_code, env)
                if buffer_types or pythran_types:
                    env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c"))
                    self._buffer_checks(buffer_types, pythran_types, pyx_code, decl_code, env)
                if has_object_fallback:
                    pyx_code.context.update(specialized_type_name='object')
                    pyx_code.putln(self.match)
                else:
                    pyx_code.putln(self.no_match)
                pyx_code.putln("break")
                pyx_code.dedent()

            fused_index += 1
            all_buffer_types.update(buffer_types)
            all_buffer_types.update(ty.org_buffer for ty in pythran_types)

        if arg.default:
            default_idx += 1

    if all_buffer_types:
        self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types)
        env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c"))
        env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c"))

    # Final signature matching: compare the collected dest_sig against
    # each registered specialization signature string.
    pyx_code.put_chunk(
        u"""
            candidates = []
            for sig in <dict>signatures:
                match_found = False
                src_sig = sig.strip('()').split('|')
                for i in range(len(dest_sig)):
                    dst_type = dest_sig[i]
                    if dst_type is not None:
                        if src_sig[i] == dst_type:
                            match_found = True
                        else:
                            match_found = False
                            break

                if match_found:
                    candidates.append(sig)

            if not candidates:
                raise TypeError("No matching signature found")
            elif len(candidates) > 1:
                raise TypeError("Function call with ambiguous argument types")
            else:
                return (<dict>signatures)[candidates[0]]
        """)

    fragment_code = pyx_code.getvalue()
    # print decl_code.getvalue()
    # print fragment_code
    from .Optimize import ConstantFolding
    fragment = TreeFragment.TreeFragment(
        fragment_code, level='module', pipeline=[ConstantFolding()])
    ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root)
    UtilityCode.declare_declarations_in_scope(
        decl_code.getvalue(), env.global_scope())
    ast.scope = env
    # FIXME: for static methods of cdef classes, we build the wrong signature here: first arg becomes 'self'
    ast.analyse_declarations(env)
    py_func = ast.stats[-1]  # the DefNode
    self.fragment_scope = ast.scope

    if isinstance(self.node, DefNode):
        py_func.specialized_cpdefs = self.nodes[:]
    else:
        py_func.specialized_cpdefs = [n.py_func for n in self.nodes]

    return py_func
|
||||
|
||||
def update_fused_defnode_entry(self, env):
    """
    Make the synthesized dispatcher (self.py_func) impersonate the original
    Python function: copy the identifying entry attributes, rebind the
    scope entries, and attach the dispatcher to every specialization.
    """
    copy_attributes = (
        'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname',
        'pymethdef_cname', 'doc', 'doc_cname', 'is_member',
        'scope'
    )

    entry = self.py_func.entry

    for attr in copy_attributes:
        setattr(entry, attr,
                getattr(self.orig_py_func.entry, attr))

    self.py_func.name = self.orig_py_func.name
    self.py_func.doc = self.orig_py_func.doc

    # Drop the synthetic name the dispatcher was declared under and
    # re-register it under the original function's name.
    env.entries.pop('__pyx_fused_cpdef', None)
    if isinstance(self.node, DefNode):
        env.entries[entry.name] = entry
    else:
        # cdef/cpdef: the Python-visible entry hangs off as_variable.
        env.entries[entry.name].as_variable = entry

    env.pyfunc_entries.append(entry)

    self.py_func.entry.fused_cfunction = self
    for node in self.nodes:
        if isinstance(self.node, DefNode):
            node.fused_py_func = self.py_func
        else:
            node.py_func.fused_py_func = self.py_func
            node.entry.as_variable = entry

    self.synthesize_defnodes()
    self.stats.append(self.__signatures__)
|
||||
|
||||
def analyse_expressions(self, env):
    """
    Analyse the expressions. Take care to only evaluate default arguments
    once and clone the result for all specializations
    """
    # Complex specializations need their declaration utility code even if
    # only some specializations use them.
    for fused_compound_type in self.fused_compound_types:
        for fused_type in fused_compound_type.get_fused_types():
            for specialization_type in fused_type.types:
                if specialization_type.is_complex:
                    specialization_type.create_declaration_utility_code(env)

    if self.py_func:
        self.__signatures__ = self.__signatures__.analyse_expressions(env)
        self.py_func = self.py_func.analyse_expressions(env)
        self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env)
        self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env)

    # Evaluate each default once, wrapped in a ProxyNode so every
    # specialization can reference the same result via CloneNode.
    self.defaults = defaults = []

    for arg in self.node.args:
        if arg.default:
            arg.default = arg.default.analyse_expressions(env)
            defaults.append(ProxyNode(arg.default))
        else:
            defaults.append(None)

    for i, stat in enumerate(self.stats):
        stat = self.stats[i] = stat.analyse_expressions(env)
        if isinstance(stat, FuncDefNode):
            # Point each specialization's defaults at clones of the shared
            # evaluated defaults, coerced to the specialized arg types.
            for arg, default in zip(stat.args, defaults):
                if default is not None:
                    arg.default = CloneNode(default).coerce_to(arg.type, env)

    if self.py_func:
        args = [CloneNode(default) for default in defaults if default]
        self.defaults_tuple = TupleNode(self.pos, args=args)
        self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True).coerce_to_pyobject(env)
        self.defaults_tuple = ProxyNode(self.defaults_tuple)
        # All specializations share the first specialization's code object.
        self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object)

        fused_func = self.resulting_fused_function.arg
        fused_func.defaults_tuple = CloneNode(self.defaults_tuple)
        fused_func.code_object = CloneNode(self.code_object)

        for i, pycfunc in enumerate(self.specialized_pycfuncs):
            pycfunc.code_object = CloneNode(self.code_object)
            pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env)
            pycfunc.defaults_tuple = CloneNode(self.defaults_tuple)
    return self
|
||||
|
||||
def synthesize_defnodes(self):
    """
    Create the __signatures__ dict of PyCFunctionNode specializations.

    Maps each specialization's signature string to a binding PyCFunction
    wrapping its DefNode; also records the wrappers in
    self.specialized_pycfuncs.
    """
    if isinstance(self.nodes[0], CFuncDefNode):
        # cpdef specializations: use the attached Python wrapper functions.
        nodes = [node.py_func for node in self.nodes]
    else:
        nodes = self.nodes

    signatures = [StringEncoding.EncodedString(node.specialized_signature_string)
                  for node in nodes]
    keys = [ExprNodes.StringNode(node.pos, value=sig)
            for node, sig in zip(nodes, signatures)]
    values = [ExprNodes.PyCFunctionNode.from_defnode(node, binding=True)
              for node in nodes]

    self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos, zip(keys, values))

    self.specialized_pycfuncs = values
    for pycfuncnode in values:
        pycfuncnode.is_specialization = True
|
||||
|
||||
def generate_function_definitions(self, env, code):
    """Emit C definitions for the dispatcher assignment (when a Python
    dispatcher exists) and for every specialization that is actually used."""
    if self.py_func:
        # The dispatcher must get a PyMethodDef before its assignment
        # is generated.
        self.py_func.pymethdef_required = True
        self.fused_func_assignment.generate_function_definitions(env, code)

    for specialization in self.stats:
        if not isinstance(specialization, FuncDefNode):
            continue
        if specialization.entry.used:
            code.mark_pos(specialization.pos)
            specialization.generate_function_definitions(env, code)
|
||||
|
||||
def generate_execution_code(self, code):
    """
    Evaluate shared defaults and the code object, run every specialization
    statement, then publish the __signatures__ dict on the fused function
    object and dispose of all temporaries.
    """
    # Note: all def function specialization are wrapped in PyCFunction
    # nodes in the self.__signatures__ dictnode.
    for default in self.defaults:
        if default is not None:
            default.generate_evaluation_code(code)

    if self.py_func:
        self.defaults_tuple.generate_evaluation_code(code)
        self.code_object.generate_evaluation_code(code)

    for stat in self.stats:
        code.mark_pos(stat.pos)
        if isinstance(stat, ExprNodes.ExprNode):
            stat.generate_evaluation_code(code)
        else:
            stat.generate_execution_code(code)

    if self.__signatures__:
        self.resulting_fused_function.generate_evaluation_code(code)

        # Attach the signatures dict directly to the fused function object.
        code.putln(
            "((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" %
            (self.resulting_fused_function.result(),
             self.__signatures__.result()))
        code.put_giveref(self.__signatures__.result())
        self.__signatures__.generate_post_assignment_code(code)
        self.__signatures__.free_temps(code)

        self.fused_func_assignment.generate_execution_code(code)

        # Dispose of results
        self.resulting_fused_function.generate_disposal_code(code)
        self.resulting_fused_function.free_temps(code)
        self.defaults_tuple.generate_disposal_code(code)
        self.defaults_tuple.free_temps(code)
        self.code_object.generate_disposal_code(code)
        self.code_object.free_temps(code)

    for default in self.defaults:
        if default is not None:
            default.generate_disposal_code(code)
            default.free_temps(code)
|
||||
|
||||
def annotate(self, code):
    """Forward annotation generation to every specialization statement."""
    for child_stat in self.stats:
        child_stat.annotate(code)
|
@ -0,0 +1,15 @@
|
||||
def _get_feature(name):
|
||||
import __future__
|
||||
# fall back to a unique fake object for earlier Python versions or Python 3
|
||||
return getattr(__future__, name, object())
|
||||
|
||||
# Module-level feature objects mirroring __future__; entries marked "dummy"
# exist only for completeness (they are unconditionally active).
unicode_literals = _get_feature("unicode_literals")
with_statement = _get_feature("with_statement")  # dummy
division = _get_feature("division")
print_function = _get_feature("print_function")
absolute_import = _get_feature("absolute_import")
nested_scopes = _get_feature("nested_scopes")  # dummy
generators = _get_feature("generators")  # dummy
generator_stop = _get_feature("generator_stop")

# Keep the module namespace clean: only the feature objects are public.
del _get_feature
|
@ -0,0 +1,64 @@
|
||||
"""
|
||||
This module deals with interpreting the parse tree as Python
|
||||
would have done, in the compiler.
|
||||
|
||||
For now this only covers parse tree to value conversion of
|
||||
compile-time values.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Nodes import *
|
||||
from .ExprNodes import *
|
||||
from .Errors import CompileError
|
||||
|
||||
|
||||
class EmptyScope(object):
    """A scope that defines no names: every lookup fails with None."""

    def lookup(self, name):
        # Nothing is ever bound in this scope.
        return None


# Shared singleton used for compile-time evaluation without a real scope.
empty_scope = EmptyScope()
|
||||
|
||||
def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
    """
    Tries to interpret a list of compile time option nodes.
    The result will be a tuple (optlist, optdict) but where
    all expression nodes have been interpreted. The result is
    in the form of tuples (value, pos).

    optlist is a list of nodes, while optdict is a DictNode (the
    result optdict is a dict)

    If type_env is set, all type nodes will be analysed and the resulting
    type set. Otherwise only interpretateable ExprNodes
    are allowed, other nodes raises errors.

    A CompileError will be raised if there are problems.
    """

    def interpret(node, ix):
        # Arguments at positions/keys listed in type_args are types,
        # not ordinary compile-time values.
        if ix in type_args:
            if type_env:
                type = node.analyse_as_type(type_env)
                if not type:
                    raise CompileError(node.pos, "Invalid type.")
                return (type, node.pos)
            else:
                raise CompileError(node.pos, "Type not allowed here.")
        else:
            # On Python 3, prefer the unicode value of string literals.
            if (sys.version_info[0] >= 3 and
                    isinstance(node, StringNode) and
                    node.unicode_value is not None):
                return (node.unicode_value, node.pos)
            return (node.compile_time_value(empty_scope), node.pos)

    if optlist:
        optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
    # Bug fix: new_optdict was previously unbound (NameError on return)
    # whenever optdict was None/empty; default it explicitly.
    new_optdict = None
    if optdict:
        assert isinstance(optdict, DictNode)
        new_optdict = {}
        for item in optdict.key_value_pairs:
            new_key, dummy = interpret(item.key, None)
            new_optdict[new_key] = interpret(item.value, item.key.value)
        optdict = new_optdict
    return (optlist, new_optdict)
|
@ -0,0 +1,138 @@
|
||||
# cython: language_level=3, py2_import=True
|
||||
#
|
||||
# Cython Scanner - Lexical Definitions
|
||||
#
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
# String-literal prefix characters recognised by the scanner; invalid
# prefix combinations are rejected later, in p_string_literal.
raw_prefixes = "rR"
bytes_prefixes = "bB"
string_prefixes = "fFuU" + bytes_prefixes
char_prefixes = "cC"  # Cython-only: C char literals, e.g. c'x'
any_string_prefix = raw_prefixes + string_prefixes + char_prefixes
# Token-type tag for identifiers, shared with the parser.
IDENT = 'IDENT'
|
||||
|
||||
|
||||
def make_lexicon():
    """Build and return the Plex Lexicon used by the Cython scanner.

    Defines the token patterns (identifiers, numeric literals with PEP 515
    underscores, punctuation, string openers and escapes) plus the scanner
    states for indentation tracking and the four string-literal modes.
    """
    from ..Plex import \
        Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
        TEXT, IGNORE, State, Lexicon
    from .Scanning import Method

    # Basic character classes.
    letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
    digit = Any("0123456789")
    bindigit = Any("01")
    octdigit = Any("01234567")
    hexdigit = Any("0123456789ABCDEFabcdef")
    # Leading whitespace at the beginning of a line.
    indentation = Bol + Rep(Any(" \t"))

    # PEP 515: digit groups may be separated by single underscores.
    def underscore_digits(d):
        return Rep1(d) + Rep(Str("_") + Rep1(d))

    decimal = underscore_digits(digit)
    dot = Str(".")
    exponent = Any("Ee") + Opt(Any("+-")) + decimal
    decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)

    name = letter + Rep(letter | digit)
    # Decimal, or 0x/0o/0b-prefixed integer constants.
    intconst = decimal | (Str("0") + ((Any("Xx") + underscore_digits(hexdigit)) |
                                      (Any("Oo") + underscore_digits(octdigit)) |
                                      (Any("Bb") + underscore_digits(bindigit)) ))
    # C-style suffixes (u/U, up to two l/L) in either order.
    intsuffix = (Opt(Any("Uu")) + Opt(Any("Ll")) + Opt(Any("Ll"))) | (Opt(Any("Ll")) + Opt(Any("Ll")) + Opt(Any("Uu")))
    intliteral = intconst + intsuffix
    fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
    imagconst = (intconst | fltconst) + Any("jJ")

    # invalid combinations of prefixes are caught in p_string_literal
    beginstring = Opt(Rep(Any(string_prefixes + raw_prefixes)) |
                      Any(char_prefixes)
                      ) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
    two_oct = octdigit + octdigit
    three_oct = octdigit + octdigit + octdigit
    two_hex = hexdigit + hexdigit
    four_hex = two_hex + two_hex
    # Backslash escapes: octal, \N{...}, \u/\x/\U hex forms, or any char.
    escapeseq = Str("\\") + (two_oct | three_oct |
                             Str('N{') + Rep(AnyBut('}')) + Str('}') |
                             Str('u') + four_hex | Str('x') + two_hex |
                             Str('U') + four_hex + four_hex | AnyChar)

    bra = Any("([{")
    ket = Any(")]}")
    punct = Any(":,;+-*/|&<>=.%`~^?!@")
    diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
                    "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
                    "<<=", ">>=", "**=", "//=", "->", "@=")
    spaces = Rep1(Any(" \t\f"))
    escaped_newline = Str("\\\n")
    lineterm = Eol + Opt(Str("\n"))

    comment = Str("#") + Rep(AnyBut("\n"))

    return Lexicon([
        (name, IDENT),
        # Numeric tokens strip PEP 515 underscores in the scanner method.
        (intliteral, Method('strip_underscores', symbol='INT')),
        (fltconst, Method('strip_underscores', symbol='FLOAT')),
        (imagconst, Method('strip_underscores', symbol='IMAG')),
        (punct | diphthong, TEXT),

        (bra, Method('open_bracket_action')),
        (ket, Method('close_bracket_action')),
        (lineterm, Method('newline_action')),

        (beginstring, Method('begin_string_action')),

        (comment, IGNORE),
        (spaces, IGNORE),
        (escaped_newline, IGNORE),

        # Entered at the start of each logical line to measure indentation.
        State('INDENT', [
            (comment + lineterm, Method('commentline')),
            (Opt(spaces) + Opt(comment) + lineterm, IGNORE),
            (indentation, Method('indentation_action')),
            (Eof, Method('eof_action'))
        ]),

        # Single-quoted string body; a bare newline is an error.
        State('SQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Str('"'), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str("'"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Double-quoted string body.
        State('DQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\n\\')), 'CHARS'),
            (Str("'"), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str('"'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Triple-single-quoted string body; newlines are allowed.
        State('TSQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str("'''"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Triple-double-quoted string body; newlines are allowed.
        State('TDQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\'\n\\')), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str('"""'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        (Eof, Method('eof_action'))
        ],

        # FIXME: Plex 1.9 needs different args here from Plex 1.1.4
        #debug_flags = scanner_debug_flags,
        #debug_file = scanner_dump_file
        )
|
||||
|
914
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Main.py
Normal file
914
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Main.py
Normal file
@ -0,0 +1,914 @@
|
||||
#
|
||||
# Cython Top Level
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import io
|
||||
|
||||
# Refuse to run on interpreters older than 2.6, or on 3.x before 3.3.
if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[:2] < (3, 3):
    sys.stderr.write("Sorry, Cython requires Python 2.6+ or 3.3+, found %d.%d\n" % tuple(sys.version_info[:2]))
    sys.exit(1)

# Py2/Py3 compatibility: on Python 3 there is no basestring, use str.
try:
    from __builtin__ import basestring
except ImportError:
    basestring = str
|
||||
|
||||
# Do not import Parsing here, import it when needed, because Parsing imports
|
||||
# Nodes, which globally needs debug command line options initialized to set a
|
||||
# conditional metaclass. These options are processed by CmdLine called from
|
||||
# main() in this file.
|
||||
# import Parsing
|
||||
from . import Errors
|
||||
from .StringEncoding import EncodedString
|
||||
from .Scanning import PyrexScanner, FileSourceDescriptor
|
||||
from .Errors import PyrexError, CompileError, error, warning
|
||||
from .Symtab import ModuleScope
|
||||
from .. import Utils
|
||||
from . import Options
|
||||
|
||||
from . import Version  # legacy import needed by old PyTables versions
version = Version.version  # legacy attribute - use "Cython.__version__" instead

# Validates dotted Python module names ("pkg.sub.mod").
module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$")

# Module-level verbosity flag, raised by the command-line front end.
verbose = 0

# Absolute path of the bundled Cython/Includes directory (.pxd search path).
standard_include_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                     os.path.pardir, 'Includes'))
|
||||
|
||||
class CompilationData(object):
    """Bundles the information that is passed from transform to transform.

    While Context contains every pxd ever loaded, path information etc.,
    this only contains the data related to a single compilation pass.

    Attributes (currently documentation only; instances are plain bags):
      pyx        ModuleNode              Main code tree of this compilation.
      pxds       {string : ModuleNode}   Trees for the pxds used in the pyx.
      codewriter CCodeWriter             Where to output final code.
      options    CompilationOptions
      result     CompilationResult
    """
    pass
|
||||
|
||||
|
||||
class Context(object):
|
||||
# This class encapsulates the context needed for compiling
|
||||
# one or more Cython implementation files along with their
|
||||
# associated and imported declaration files. It includes
|
||||
# the root of the module import namespace and the list
|
||||
# of directories to search for include files.
|
||||
#
|
||||
# modules {string : ModuleScope}
|
||||
# include_directories [string]
|
||||
# future_directives [object]
|
||||
# language_level int currently 2 or 3 for Python 2/3
|
||||
|
||||
# Class-level defaults; both are (re)assigned per instance/in __init__.
cython_scope = None
language_level = None  # warn when not set but default to Py2
|
||||
|
||||
def __init__(self, include_directories, compiler_directives, cpp=False,
             language_level=None, options=None):
    """Set up module registry, include paths, directives and intern cache.

    include_directories: list of .pxd/include search paths.
    compiler_directives: dict of active compiler directives.
    cpp: whether we are compiling to C++.
    language_level: 2, 3 or '3str'; applied via set_language_level.
    """
    # cython_scope is a hack, set to False by subclasses, in order to break
    # an infinite loop.
    # Better code organization would fix it.

    from . import Builtin, CythonScope
    self.modules = {"__builtin__" : Builtin.builtin_scope}
    self.cython_scope = CythonScope.create_cython_scope(self)
    self.modules["cython"] = self.cython_scope
    self.include_directories = include_directories
    self.future_directives = set()
    self.compiler_directives = compiler_directives
    self.cpp = cpp
    self.options = options

    self.pxds = {}  # full name -> node tree
    self._interned = {}  # (type(value), value, *key_args) -> interned_value

    if language_level is not None:
        self.set_language_level(language_level)

    self.gdb_debug_outputwriter = None
|
||||
|
||||
def set_language_level(self, level):
    """Configure the source language level: 2, 3, or '3str'.

    '3str' behaves like level 3 except that plain string literals stay
    str (unicode_literals is not activated).
    NOTE(review): nesting of the first ``level >= 3`` check was
    reconstructed from upstream Cython; the dump lost indentation.
    """
    from .Future import print_function, unicode_literals, absolute_import, division
    future_directives = set()
    if level == '3str':
        level = 3
    else:
        level = int(level)
        if level >= 3:
            # Only a true "3" level makes unprefixed literals unicode.
            future_directives.add(unicode_literals)
    if level >= 3:
        future_directives.update([print_function, absolute_import, division])
    self.language_level = level
    self.future_directives = future_directives
    if level >= 3:
        # In Py3 mode "builtins" aliases the Py2 "__builtin__" scope.
        self.modules['builtins'] = self.modules['__builtin__']
|
||||
|
||||
def intern_ustring(self, value, encoding=None):
    """Return a canonical EncodedString for (value, encoding).

    Repeated requests for the same value/encoding pair share one
    interned EncodedString object.
    """
    cache_key = (EncodedString, value, encoding)
    cached = self._interned.get(cache_key)
    if cached is not None:
        return cached
    interned = EncodedString(value)
    if encoding:
        interned.encoding = encoding
    self._interned[cache_key] = interned
    return interned
||||
|
||||
def intern_value(self, value, *key):
    """Return a canonical shared object for *value*, keyed by its type,
    the value itself, and any extra key parts."""
    full_key = (type(value), value) + key
    if full_key in self._interned:
        return self._interned[full_key]
    self._interned[full_key] = value
    return value
||||
|
||||
# pipeline creation functions can now be found in Pipeline.py
|
||||
|
||||
def process_pxd(self, source_desc, scope, module_name):
    """Run the appropriate pipeline over a declaration source and return
    the (error, data) pair produced by Pipeline.run_pipeline()."""
    from . import Pipeline
    if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
        # A .pyx file is being cimported directly: compile it in
        # "pyx as pxd" mode instead of the plain pxd pipeline.
        source = CompilationSource(source_desc, module_name, os.getcwd())
        result_sink = create_default_resultobj(source, self.options)
        pipeline = Pipeline.create_pyx_as_pxd_pipeline(self, result_sink)
        result = Pipeline.run_pipeline(pipeline, source)
    else:
        pipeline = Pipeline.create_pxd_pipeline(self, scope, module_name)
        result = Pipeline.run_pipeline(pipeline, source_desc)
    return result
||||
|
||||
def nonfatal_error(self, exc):
    # Record a non-fatal compile error via the global error reporter and
    # let compilation continue.
    return Errors.report_error(exc)
||||
|
||||
def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1,
                absolute_fallback=True):
    # Finds and returns the module scope corresponding to
    # the given relative or absolute module name. If this
    # is the first time the module has been requested, finds
    # the corresponding .pxd file and process it.
    # If relative_to is not None, it must be a module scope,
    # and the module will first be searched for relative to
    # that module, provided its name is not a dotted name.
    debug_find_module = 0
    if debug_find_module:
        print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % (
            module_name, relative_to, pos, need_pxd))

    scope = None
    pxd_pathname = None
    if relative_to:
        if module_name:
            # from .module import ...
            qualified_name = relative_to.qualify_name(module_name)
        else:
            # from . import ...  -- the "module" is the package itself.
            qualified_name = relative_to.qualified_name
            scope = relative_to
            relative_to = None
    else:
        qualified_name = module_name

    if not module_name_pattern.match(qualified_name):
        raise CompileError(pos or (module_name, 0, 0),
                           "'%s' is not a valid module name" % module_name)

    if relative_to:
        # Try to resolve the name relative to the importing module first.
        if debug_find_module:
            print("...trying relative import")
        scope = relative_to.lookup_submodule(module_name)
        if not scope:
            pxd_pathname = self.find_pxd_file(qualified_name, pos)
            if pxd_pathname:
                scope = relative_to.find_submodule(module_name)
    if not scope:
        # Fall back to an absolute lookup, creating intermediate
        # module scopes as needed.
        if debug_find_module:
            print("...trying absolute import")
        if absolute_fallback:
            qualified_name = module_name
        scope = self
        for name in qualified_name.split("."):
            scope = scope.find_submodule(name)

    if debug_find_module:
        print("...scope = %s" % scope)
    if not scope.pxd_file_loaded:
        # First time this module is requested: locate and process its .pxd.
        if debug_find_module:
            print("...pxd not loaded")
        if not pxd_pathname:
            if debug_find_module:
                print("...looking for pxd file")
            # Only look in sys.path if we are explicitly looking
            # for a .pxd file.
            pxd_pathname = self.find_pxd_file(qualified_name, pos, sys_path=need_pxd)
            if debug_find_module:
                print("......found %s" % pxd_pathname)
            if not pxd_pathname and need_pxd:
                # Set pxd_file_loaded such that we don't need to
                # look for the non-existing pxd file next time.
                scope.pxd_file_loaded = True
                # A plain Python package (__init__.py) is acceptable
                # without a .pxd; anything else is an error.
                package_pathname = self.search_include_directories(qualified_name, ".py", pos)
                if package_pathname and package_pathname.endswith('__init__.py'):
                    pass
                else:
                    error(pos, "'%s.pxd' not found" % qualified_name.replace('.', os.sep))
        if pxd_pathname:
            scope.pxd_file_loaded = True
            try:
                if debug_find_module:
                    print("Context.find_module: Parsing %s" % pxd_pathname)
                rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
                if not pxd_pathname.endswith(rel_path):
                    rel_path = pxd_pathname  # safety measure to prevent printing incorrect paths
                source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
                err, result = self.process_pxd(source_desc, scope, qualified_name)
                if err:
                    raise err
                (pxd_codenodes, pxd_scope) = result
                self.pxds[module_name] = (pxd_codenodes, pxd_scope)
            except CompileError:
                # The error has already been reported; return the scope
                # anyway so compilation can continue collecting errors.
                pass
    return scope
||||
|
||||
def find_pxd_file(self, qualified_name, pos, sys_path=True):
    """Search the include path (and sys.path if *sys_path* is true) for
    the .pxd file of the given fully-qualified module name.

    Will find either a dotted filename or a file in a package directory.
    If a source file position is given, the directory containing the
    source file is searched first for a dotted filename, and its
    containing package root directory is searched first for a non-dotted
    filename.  Falls back to the deprecated Includes/Deprecated
    directory for a few legacy module names, and finally to a .pyx file
    when Options.cimport_from_pyx is enabled.  Returns None if nothing
    is found.
    """
    pxd = self.search_include_directories(qualified_name, ".pxd", pos, sys_path=sys_path)
    if pxd is None:  # XXX Keep this until Includes/Deprecated is removed
        if (qualified_name.startswith('python') or
                qualified_name in ('stdlib', 'stdio', 'stl')):
            standard_include_path = os.path.abspath(os.path.normpath(
                os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
            deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
            self.include_directories.append(deprecated_include_path)
            try:
                pxd = self.search_include_directories(qualified_name, ".pxd", pos)
            finally:
                # Always restore the original include path.
                self.include_directories.pop()
            if pxd:
                name = qualified_name
                if name.startswith('python'):
                    warning(pos, "'%s' is deprecated, use 'cpython'" % name, 1)
                elif name in ('stdlib', 'stdio'):
                    warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
                # BUG FIX: was `name in ('stl')` -- a parenthesized string,
                # i.e. a substring test, not a one-element tuple.
                elif name in ('stl',):
                    warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
    if pxd is None and Options.cimport_from_pyx:
        return self.find_pyx_file(qualified_name, pos)
    return pxd
||||
|
||||
def find_pyx_file(self, qualified_name, pos):
    """Locate the .pyx implementation file for the given fully-qualified
    module name by searching the include path (as for find_pxd_file())."""
    return self.search_include_directories(qualified_name, ".pyx", pos)
||||
|
||||
def find_include_file(self, filename, pos):
    """Search the include directories for *filename* verbatim (no package
    dereferencing).  Reports an error and returns None when missing."""
    found = self.search_include_directories(filename, "", pos, include=True)
    if not found:
        error(pos, "'%s' not found" % filename)
    return found
||||
|
||||
def search_include_directories(self, qualified_name, suffix, pos,
                               include=False, sys_path=False):
    """Resolve *qualified_name* + *suffix* against this context's include
    path (optionally extended with sys.path), delegating to the cached
    module-level search function."""
    dirs = list(self.include_directories)
    if sys_path:
        dirs.extend(sys.path)
    # The tuple conversion makes the path list hashable, which the
    # module-level function requires for @cached_function caching.
    dirs.append(standard_include_path)
    return search_include_directories(tuple(dirs), qualified_name,
                                      suffix, pos, include)
||||
|
||||
def find_root_package_dir(self, file_path):
    # Thin delegation to the shared Utils helper; returns the topmost
    # package directory enclosing file_path.
    return Utils.find_root_package_dir(file_path)
||||
|
||||
def check_package_dir(self, dir, package_names):
    # Thin delegation to the shared Utils helper; the tuple conversion
    # makes the argument hashable for Utils-level caching.
    return Utils.check_package_dir(dir, tuple(package_names))
||||
|
||||
def c_file_out_of_date(self, source_path, output_path):
    """Return 1 if the generated C file at *output_path* is missing or
    older than the source, its .pxd sibling, or any recorded dependency;
    otherwise return 0."""
    if not os.path.exists(output_path):
        return 1
    c_time = Utils.modification_time(output_path)
    if Utils.file_newer_than(source_path, c_time):
        return 1
    pos = [source_path]
    pxd_path = Utils.replace_suffix(source_path, ".pxd")
    if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
        return 1
    for kind, name in self.read_dependency_file(source_path):
        if kind == "cimport":
            dep_path = self.find_pxd_file(name, pos)
        elif kind == "include":
            # BUG FIX: search_include_directories takes
            # (qualified_name, suffix, pos, ...); the old call
            # passed `pos` in the suffix slot.  Included files are
            # looked up verbatim, hence include=True.
            dep_path = self.search_include_directories(name, "", pos, include=True)
        else:
            continue
        if dep_path and Utils.file_newer_than(dep_path, c_time):
            return 1
    return 0
||||
|
||||
def find_cimported_module_names(self, source_path):
    """Return the names of all modules cimported by *source_path*, as
    recorded in its dependency file."""
    names = []
    for kind, name in self.read_dependency_file(source_path):
        if kind == "cimport":
            names.append(name)
    return names
||||
|
||||
def is_package_dir(self, dir_path):
    # Thin delegation to the shared Utils helper: is dir_path a package
    # directory?
    return Utils.is_package_dir(dir_path)
||||
|
||||
def read_dependency_file(self, source_path):
    """Read the ".dep" file next to *source_path* and return its entries
    as (kind, name) pairs; returns an empty tuple if no such file exists."""
    dep_path = Utils.replace_suffix(source_path, ".dep")
    if not os.path.exists(dep_path):
        return ()
    # The old code used open(dep_path, "rU"); the "U" mode flag is
    # deprecated and was removed in Python 3.11 -- plain text mode
    # already performs universal-newline translation.  `with` guarantees
    # the handle is closed even if reading fails.
    with open(dep_path) as f:
        return [line.strip().split(" ", 1)
                for line in f
                if " " in line.strip()]
||||
|
||||
def lookup_submodule(self, name):
    """Look up a top-level module scope; return None when unknown."""
    return self.modules.get(name)
||||
|
||||
def find_submodule(self, name):
    """Fetch a top-level module scope, registering a fresh one on demand."""
    existing = self.lookup_submodule(name)
    if existing:
        return existing
    created = ModuleScope(name, parent_module=None, context=self)
    self.modules[name] = created
    return created
||||
|
||||
def parse(self, source_desc, scope, pxd, full_module_name):
    """Parse the given source file and return its parse tree.

    Raises CompileError if parsing produced new errors, and reports a
    targeted decoding error for files with a wrong/missing coding line.
    """
    if not isinstance(source_desc, FileSourceDescriptor):
        raise RuntimeError("Only file sources for code supported")
    source_filename = source_desc.filename
    scope.cpp = self.cpp
    # Parse the given source file and return a parse tree.
    num_errors = Errors.num_errors
    try:
        with Utils.open_source_file(source_filename) as f:
            from . import Parsing
            s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
                             scope = scope, context = self)
            tree = Parsing.p_module(s, pxd, full_module_name)
            if self.options.formal_grammar:
                # Optional second parse against the formal grammar,
                # available only in compiled Cython builds with pgen.
                try:
                    from ..Parser import ConcreteSyntaxTree
                except ImportError:
                    raise RuntimeError(
                        "Formal grammar can only be used with compiled Cython with an available pgen.")
                ConcreteSyntaxTree.p_module(source_filename)
    except UnicodeDecodeError as e:
        #import traceback
        #traceback.print_exc()
        # Translate the raw decode failure into a positioned compile error.
        raise self._report_decode_error(source_desc, e)

    # Compare against the error count from before parsing: only fail if
    # parsing itself added errors.
    if Errors.num_errors > num_errors:
        raise CompileError()
    return tree
||||
|
||||
def _report_decode_error(self, source_desc, exc):
    """Convert a UnicodeDecodeError into a positioned compile error.

    Re-reads the file as iso8859-1 (which decodes any byte sequence) to
    translate the exception's byte/character offset into a line and
    column for the error message.
    """
    msg = exc.args[-1]
    position = exc.args[2]  # start offset of the undecodable data
    encoding = exc.args[0]

    line = 1
    column = idx = 0
    with io.open(source_desc.filename, "r", encoding='iso8859-1', newline='') as f:
        for line, data in enumerate(f, 1):
            idx += len(data)
            if idx >= position:
                # Offset falls within this line; compute 1-based column.
                column = position - (idx - len(data)) + 1
                break

    return error((source_desc, line, column),
                 "Decoding error, missing or incorrect coding=<encoding-name> "
                 "at top of source (cannot decode with encoding %r: %s)" % (encoding, msg))
||||
|
||||
def extract_module_name(self, path, options):
    """Derive the fully-qualified module name from a source file's path
    by walking up through enclosing package directories."""
    dirname, basename = os.path.split(path)
    module_name, _ = os.path.splitext(basename)
    if "." in module_name:
        # An explicitly dotted file name is already fully qualified.
        return module_name
    parts = [module_name]
    while self.is_package_dir(dirname):
        parent, pkg = os.path.split(dirname)
        if parent == dirname:
            # Reached the filesystem root; stop.
            break
        parts.append(pkg)
        dirname = parent
    return ".".join(reversed(parts))
||||
|
||||
def setup_errors(self, options, result):
    """Reset the global error state and open the error output channel:
    a .lis listing file next to the main source when requested, echoed
    to stderr according to the options."""
    Errors.reset()  # clear any remaining error state
    if options.use_listing_file:
        listing = Utils.replace_suffix(result.main_source_file, ".lis")
        result.listing_file = listing
    else:
        listing = None
    Errors.open_listing_file(path=listing,
                             echo_to_stderr=options.errors_to_stderr)
||||
|
||||
def teardown_errors(self, err, options, result):
    """Finalize error reporting for one compilation: close the listing
    file, record the error count, and invalidate the generated C file
    when compilation failed so a broken file is not mistaken for a good
    build."""
    source_desc = result.compilation_source.source_desc
    if not isinstance(source_desc, FileSourceDescriptor):
        raise RuntimeError("Only file sources for code supported")
    Errors.close_listing_file()
    result.num_errors = Errors.num_errors
    if result.num_errors > 0:
        err = True
    if err and result.c_file:
        try:
            # NOTE(review): castrate_file presumably stamps the C file as
            # invalid using the source's stat info rather than deleting
            # it -- confirm against Utils.
            Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
        except EnvironmentError:
            # Best effort -- the file may already be gone or unwritable.
            pass
        result.c_file = None
||||
|
||||
|
||||
def get_output_filename(source_filename, cwd, options):
    """Compute the path of the C/C++ file to generate for
    *source_filename*, honouring options.cplus and an explicit
    options.output_file (which may name either a file or a directory)."""
    suffix = ".cpp" if options.cplus else ".c"
    default_name = Utils.replace_suffix(source_filename, suffix)
    if not options.output_file:
        return default_name
    target = os.path.join(cwd, options.output_file)
    if os.path.isdir(target):
        # output_file names a directory: keep the default base name.
        return os.path.join(target, os.path.basename(default_name))
    return target
||||
|
||||
|
||||
def create_default_resultobj(compilation_source, options):
    """Build a CompilationResult pre-populated with the source location
    and the computed C output file path."""
    result = CompilationResult()
    source_desc = compilation_source.source_desc
    result.main_source_file = source_desc.filename
    result.compilation_source = compilation_source
    result.c_file = get_output_filename(source_desc.filename,
                                        compilation_source.cwd, options)
    result.embedded_metadata = options.embedded_metadata
    return result
||||
|
||||
|
||||
def run_pipeline(source, options, full_module_name=None, context=None):
    """Compile a single source file through the full pipeline and return
    its CompilationResult.

    Creates a context on demand, chooses between the .py and .pyx
    pipelines based on the file extension, and optionally writes a
    depfile afterwards.
    """
    from . import Pipeline

    source_ext = os.path.splitext(source)[1]
    options.configure_language_defaults(source_ext[1:])  # py/pyx
    if context is None:
        context = options.create_context()

    # Set up source object
    cwd = os.getcwd()
    abs_path = os.path.abspath(source)
    full_module_name = full_module_name or context.extract_module_name(source, options)

    Utils.raise_error_if_module_name_forbidden(full_module_name)

    if options.relative_path_in_code_position_comments:
        rel_path = full_module_name.replace('.', os.sep) + source_ext
        if not abs_path.endswith(rel_path):
            rel_path = source  # safety measure to prevent printing incorrect paths
    else:
        rel_path = abs_path
    source_desc = FileSourceDescriptor(abs_path, rel_path)
    source = CompilationSource(source_desc, full_module_name, cwd)

    # Set up result object
    result = create_default_resultobj(source, options)

    if options.annotate is None:
        # By default, decide based on whether an html file already exists.
        html_filename = os.path.splitext(result.c_file)[0] + ".html"
        if os.path.exists(html_filename):
            with io.open(html_filename, "r", encoding="UTF-8") as html_file:
                # Only re-annotate files that Cython generated itself.
                if u'<!-- Generated by Cython' in html_file.read(100):
                    options.annotate = True

    # Get pipeline
    if source_ext.lower() == '.py' or not source_ext:
        pipeline = Pipeline.create_py_pipeline(context, options, result)
    else:
        pipeline = Pipeline.create_pyx_pipeline(context, options, result)

    context.setup_errors(options, result)
    err, enddata = Pipeline.run_pipeline(pipeline, source)
    context.teardown_errors(err, options, result)
    if options.depfile:
        from ..Build.Dependencies import create_dependency_tree
        dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file)
        Utils.write_depfile(result.c_file, result.main_source_file, dependencies)
    return result
||||
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
#
|
||||
# Main Python entry points
|
||||
#
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
class CompilationSource(object):
    """
    Contains the data necessary to start up a compilation pipeline for a
    single compilation unit: the source descriptor, the fully qualified
    module name, and the working directory compilation started from.
    """
    def __init__(self, source_desc, full_module_name, cwd):
        # Plain data holder; no validation is performed here.
        self.source_desc = source_desc
        self.full_module_name = full_module_name
        self.cwd = cwd
||||
|
||||
|
||||
class CompilationOptions(object):
    r"""
    See default_options at the end of this module for a list of all possible
    options and CmdLine.usage and CmdLine.parse_command_line() for their
    meaning.
    """
    def __init__(self, defaults=None, **kw):
        """Merge *defaults* (a CompilationOptions or a dict, falling back
        to default_options) with keyword overrides, validating option and
        directive names along the way."""
        self.include_path = []
        if defaults:
            if isinstance(defaults, CompilationOptions):
                defaults = defaults.__dict__
        else:
            defaults = default_options

        options = dict(defaults)
        options.update(kw)

        # let's assume 'default_options' contains a value for most known compiler options
        # and validate against them
        unknown_options = set(options) - set(default_options)
        # ignore valid options that are not in the defaults
        unknown_options.difference_update(['include_path'])
        if unknown_options:
            message = "got unknown compilation option%s, please remove: %s" % (
                's' if len(unknown_options) > 1 else '',
                ', '.join(unknown_options))
            raise ValueError(message)

        directive_defaults = Options.get_directive_defaults()
        directives = dict(options['compiler_directives'])  # copy mutable field
        # check for invalid directives
        unknown_directives = set(directives) - set(directive_defaults)
        if unknown_directives:
            message = "got unknown compiler directive%s: %s" % (
                's' if len(unknown_directives) > 1 else '',
                ', '.join(unknown_directives))
            raise ValueError(message)
        options['compiler_directives'] = directives
        if directives.get('np_pythran', False) and not options['cplus']:
            import warnings
            warnings.warn("C++ mode forced when in Pythran mode!")
            options['cplus'] = True
        # Directives may set language_level / formal_grammar, but explicit
        # keyword arguments take precedence over directives.
        if 'language_level' in directives and 'language_level' not in kw:
            options['language_level'] = directives['language_level']
        elif not options.get('language_level'):
            options['language_level'] = directive_defaults.get('language_level')
        if 'formal_grammar' in directives and 'formal_grammar' not in kw:
            options['formal_grammar'] = directives['formal_grammar']
        if options['cache'] is True:
            options['cache'] = os.path.join(Utils.get_cython_cache_dir(), 'compiler')

        self.__dict__.update(options)

    def configure_language_defaults(self, source_extension):
        """Adjust directive defaults based on the source file type:
        plain .py files get binding semantics by default."""
        if source_extension == 'py':
            if self.compiler_directives.get('binding') is None:
                self.compiler_directives['binding'] = True

    def create_context(self):
        """Build a Context configured from these options."""
        return Context(self.include_path, self.compiler_directives,
                       self.cplus, self.language_level, options=self)

    def get_fingerprint(self):
        r"""
        Return a string that contains all the options that are relevant for cache invalidation.
        """
        # Collect only the data that can affect the generated file(s).
        data = {}

        for key, value in self.__dict__.items():
            if key in ['show_version', 'errors_to_stderr', 'verbose', 'quiet']:
                # verbosity flags have no influence on the compilation result
                continue
            elif key in ['output_file', 'output_dir']:
                # ignore the exact name of the output file
                continue
            elif key in ['timestamps']:
                # the cache cares about the content of files, not about the timestamps of sources
                continue
            elif key in ['cache']:
                # hopefully caching has no influence on the compilation result
                continue
            elif key in ['compiler_directives']:
                # directives passed on to the C compiler do not influence the generated C code
                continue
            elif key in ['include_path']:
                # this path changes which headers are tracked as dependencies,
                # it has no influence on the generated C code
                continue
            elif key in ['working_path']:
                # this path changes where modules and pxd files are found;
                # their content is part of the fingerprint anyway, their
                # absolute path does not matter
                continue
            elif key in ['create_extension']:
                # create_extension() has already mangled the options, e.g.,
                # embedded_metadata, when the fingerprint is computed so we
                # ignore it here.
                continue
            elif key in ['build_dir']:
                # the (temporary) directory where we collect dependencies
                # has no influence on the C output
                continue
            elif key in ['use_listing_file', 'generate_pxi', 'annotate', 'annotate_coverage_xml']:
                # all output files are contained in the cache so the types of
                # files generated must be part of the fingerprint
                data[key] = value
            elif key in ['formal_grammar', 'evaluate_tree_assertions']:
                # these bits can change whether compilation to C passes/fails
                data[key] = value
            elif key in ['embedded_metadata', 'emit_linenums', 'c_line_in_traceback', 'gdb_debug', 'relative_path_in_code_position_comments']:
                # the generated code contains additional bits when these are set
                data[key] = value
            elif key in ['cplus', 'language_level', 'compile_time_env', 'np_pythran']:
                # assorted bits that, e.g., influence the parser
                data[key] = value
            # BUG FIX: the next two branches used `key == [...]`, comparing a
            # string to a list -- always False, so these options silently fell
            # through to the generic fingerprint branch and the guards below
            # could never fire.
            elif key in ['capi_reexport_cincludes']:
                if self.capi_reexport_cincludes:
                    # our caching implementation does not yet include fingerprints of all the header files
                    raise NotImplementedError('capi_reexport_cincludes is not compatible with Cython caching')
            elif key in ['common_utility_include_dir']:
                if self.common_utility_include_dir:
                    raise NotImplementedError('common_utility_include_dir is not compatible with Cython caching yet')
            else:
                # any unexpected option should go into the fingerprint; it's better
                # to recompile than to return incorrect results from the cache.
                data[key] = value

        def to_fingerprint(item):
            r"""
            Recursively turn item into a string, turning dicts into lists with
            deterministic ordering.
            """
            if isinstance(item, dict):
                item = sorted([(repr(key), to_fingerprint(value)) for key, value in item.items()])
            return repr(item)

        return to_fingerprint(data)
||||
|
||||
|
||||
class CompilationResult(object):
    """
    Results from the Cython compiler:

    c_file           string or None   The generated C source file
    h_file           string or None   The generated C header file
    i_file           string or None   The generated .pxi file
    api_file         string or None   The generated C API .h file
    listing_file     string or None   File of error messages
    object_file      string or None   Result of compiling the C file
    extension_file   string or None   Result of linking the object file
    num_errors       integer          Number of compilation errors
    compilation_source CompilationSource
    """

    def __init__(self):
        # All result fields start out unset; the pipeline fills them in.
        for attr in ('c_file', 'h_file', 'i_file', 'api_file',
                     'listing_file', 'object_file', 'extension_file',
                     'main_source_file'):
            setattr(self, attr, None)
||||
|
||||
|
||||
class CompilationResultSet(dict):
    """
    Results from compiling multiple Pyrex source files: a mapping from
    source file path to CompilationResult, plus a running total of
    compilation errors in `num_errors`.
    """

    # Class-level default; shadowed by an instance attribute once add()
    # has run.
    num_errors = 0

    def add(self, source, result):
        """Record *result* for *source* and accumulate its error count."""
        self[source] = result
        self.num_errors = self.num_errors + result.num_errors
||||
|
||||
|
||||
def compile_single(source, options, full_module_name = None):
    """
    compile_single(source, options, full_module_name)

    Compile the given Pyrex implementation file and return a CompilationResult.
    Always compiles a single file; does not perform timestamp checking or
    recursion.
    """
    # Thin wrapper: the pipeline does all the work.
    return run_pipeline(source, options, full_module_name)
||||
|
||||
|
||||
def compile_multiple(sources, options):
    """
    compile_multiple(sources, options)

    Compiles the given sequence of Pyrex implementation files and returns
    a CompilationResultSet. Performs timestamp checking and/or recursion
    if these are specified in the options.
    """
    if options.module_name and len(sources) > 1:
        raise RuntimeError('Full module name can only be set '
                           'for single source compilation')
    # run_pipeline creates the context
    # context = options.create_context()
    sources = [os.path.abspath(source) for source in sources]
    processed = set()
    results = CompilationResultSet()
    timestamps = options.timestamps
    verbose = options.verbose
    context = None
    cwd = os.getcwd()
    for source in sources:
        if source not in processed:
            if context is None:
                context = options.create_context()
            output_filename = get_output_filename(source, cwd, options)
            out_of_date = context.c_file_out_of_date(source, output_filename)
            # With timestamp checking enabled, skip files whose C output
            # is already up to date.
            if (not timestamps) or out_of_date:
                if verbose:
                    sys.stderr.write("Compiling %s\n" % source)
                result = run_pipeline(source, options,
                                      full_module_name=options.module_name,
                                      context=context)
                results.add(source, result)
                # Compiling multiple sources in one context doesn't quite
                # work properly yet.
                context = None
            processed.add(source)
    return results
||||
|
||||
|
||||
def compile(source, options = None, full_module_name = None, **kwds):
    """
    compile(source [, options], [, <option> = <value>]...)

    Compile one or more Pyrex implementation files, with optional timestamp
    checking and recursing on dependencies. The source argument may be a string
    or a sequence of strings. If it is a string and no recursion or timestamp
    checking is requested, a CompilationResult is returned, otherwise a
    CompilationResultSet is returned.
    """
    options = CompilationOptions(defaults = options, **kwds)
    single = isinstance(source, basestring) and not options.timestamps
    if single:
        return compile_single(source, options, full_module_name)
    return compile_multiple(source, options)
||||
|
||||
|
||||
@Utils.cached_function
def search_include_directories(dirs, qualified_name, suffix, pos, include=False):
    """
    Search the list of include directories for the given file name.

    If a source file position is given, first searches the directory
    containing that file. Returns None if not found, but does not
    report an error.

    The 'include' option will disable package dereferencing.
    """

    if pos:
        # Prepend the requesting file's own directory (for includes) or
        # its package root (for module lookups) to the search path.
        file_desc = pos[0]
        if not isinstance(file_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        if include:
            dirs = (os.path.dirname(file_desc.filename),) + dirs
        else:
            dirs = (Utils.find_root_package_dir(file_desc.filename),) + dirs

    # A "dotted filename" keeps the full qualified name in one file name,
    # e.g. "pkg.mod.pxd" directly inside an include directory.
    dotted_filename = qualified_name
    if suffix:
        dotted_filename += suffix

    if not include:
        # Precompute the package-style candidates:
        #   <dir>/<packages...>/<module><suffix>  and
        #   <dir>/<packages...>/<module>/__init__<suffix>
        names = qualified_name.split('.')
        package_names = tuple(names[:-1])
        module_name = names[-1]
        module_filename = module_name + suffix
        package_filename = "__init__" + suffix

    for dirname in dirs:
        path = os.path.join(dirname, dotted_filename)
        if os.path.exists(path):
            return path

        if not include:
            package_dir = Utils.check_package_dir(dirname, package_names)
            if package_dir is not None:
                path = os.path.join(package_dir, module_filename)
                if os.path.exists(path):
                    return path
                path = os.path.join(package_dir, module_name,
                                    package_filename)
                if os.path.exists(path):
                    return path
    return None
||||
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
#
|
||||
# Main command-line entry point
|
||||
#
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
def setuptools_main():
    # Entry point used by setuptools console_scripts: always parse the
    # command line.
    return main(command_line = 1)
||||
|
||||
|
||||
def main(command_line = 0):
    """Command-line entry point: gather options and sources (parsing
    sys.argv when *command_line* is true), compile them, and exit with
    status 1 on any failure."""
    args = sys.argv[1:]
    failed = False
    if command_line:
        from .CmdLine import parse_command_line
        options, sources = parse_command_line(args)
    else:
        options = CompilationOptions(default_options)
        sources = args

    if options.show_version:
        sys.stderr.write("Cython version %s\n" % version)
    if options.working_path != "":
        os.chdir(options.working_path)
    try:
        result = compile(sources, options)
        if result.num_errors > 0:
            failed = True
    except (EnvironmentError, PyrexError) as e:
        # Report the failure and fall through to the exit-code handling.
        sys.stderr.write(str(e) + '\n')
        failed = True
    if failed:
        sys.exit(1)
||||
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
#
|
||||
# Set the default options depending on the platform
|
||||
#
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Default values for every supported compilation option.  This dict also
# serves as the canonical list of known option names: CompilationOptions
# validates user-supplied options against its keys.
default_options = dict(
    show_version = 0,
    use_listing_file = 0,
    errors_to_stderr = 1,
    cplus = 0,
    output_file = None,
    depfile = None,
    annotate = None,
    annotate_coverage_xml = None,
    generate_pxi = 0,
    capi_reexport_cincludes = 0,
    working_path = "",
    timestamps = None,
    verbose = 0,
    quiet = 0,
    compiler_directives = {},
    embedded_metadata = {},
    evaluate_tree_assertions = False,
    emit_linenums = False,
    relative_path_in_code_position_comments = True,
    c_line_in_traceback = True,
    language_level = None,  # warn but default to 2
    formal_grammar = False,
    gdb_debug = False,
    compile_time_env = None,
    common_utility_include_dir = None,
    output_dir=None,
    build_dir=None,
    cache=None,
    create_extension=None,
    module_name=None,
    np_pythran=False
)
@ -0,0 +1,858 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .Errors import CompileError, error
|
||||
from . import ExprNodes
|
||||
from .ExprNodes import IntNode, NameNode, AttributeNode
|
||||
from . import Options
|
||||
from .Code import UtilityCode, TempitaUtilityCode
|
||||
from .UtilityCode import CythonUtilityCode
|
||||
from . import Buffer
|
||||
from . import PyrexTypes
|
||||
from . import ModuleNode
|
||||
|
||||
# Error messages raised (as CompileError) while analysing memoryview axis
# specifications such as ``int[:, ::1]`` — see get_axes_specs() and
# validate_axes_specs() below.
START_ERR = "Start must not be given."
STOP_ERR = "Axis specification only allowed in the 'step' slot."
STEP_ERR = "Step must be omitted, 1, or a valid specifier."
BOTH_CF_ERR = "Cannot specify an array that is both C and Fortran contiguous."
INVALID_ERR = "Invalid axis specification."
NOT_CIMPORTED_ERR = "Variable was not cimported from cython.view"
EXPR_ERR = "no expressions allowed in axis spec, only names and literals."
CF_ERR = "Invalid axis specification for a C/Fortran contiguous array."
# %s is filled in with the memoryview variable name.
ERR_UNINITIALIZED = ("Cannot check if memoryview %s is initialized without the "
                     "GIL, consider using initializedcheck(False)")
|
||||
|
||||
|
||||
def concat_flags(*flags):
    """Join C flag/macro names with '|' and wrap the result in parentheses."""
    joined = "|".join(flags)
    return "(%s)" % joined
|
||||
|
||||
|
||||
# C expression requesting buffer format information on acquisition.
format_flag = "PyBUF_FORMAT"

# Acquisition flag expressions for the three contiguity modes; each also
# requests PyBUF_FORMAT so the dtype can be validated against the exporter.
memview_c_contiguous = "(PyBUF_C_CONTIGUOUS | PyBUF_FORMAT)"
memview_f_contiguous = "(PyBUF_F_CONTIGUOUS | PyBUF_FORMAT)"
memview_any_contiguous = "(PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT)"
# Full (read-only) access: shape, strides and suboffsets all available.
memview_full_access = "PyBUF_FULL_RO"
#memview_strided_access = "PyBUF_STRIDED_RO"
memview_strided_access = "PyBUF_RECORDS_RO"

# C-side axis specifier constants, one per (access, packing) combination;
# mapped from spec names by _spec_to_const below.
MEMVIEW_DIRECT = '__Pyx_MEMVIEW_DIRECT'
MEMVIEW_PTR = '__Pyx_MEMVIEW_PTR'
MEMVIEW_FULL = '__Pyx_MEMVIEW_FULL'
MEMVIEW_CONTIG = '__Pyx_MEMVIEW_CONTIG'
MEMVIEW_STRIDED= '__Pyx_MEMVIEW_STRIDED'
MEMVIEW_FOLLOW = '__Pyx_MEMVIEW_FOLLOW'
|
||||
|
||||
# Spec name -> C constant used when emitting axis descriptions.
_spec_to_const = {
    'direct' : MEMVIEW_DIRECT,
    'ptr'    : MEMVIEW_PTR,
    'full'   : MEMVIEW_FULL,
    'contig' : MEMVIEW_CONTIG,
    'strided': MEMVIEW_STRIDED,
    'follow' : MEMVIEW_FOLLOW,
    }

# Spec name -> one-character abbreviation; used to build compact
# specialization suffixes for generated function names.
_spec_to_abbrev = {
    'direct'  : 'd',
    'ptr'     : 'p',
    'full'    : 'f',
    'contig'  : 'c',
    'strided' : 's',
    'follow'  : '_',
}

# C static initializer that zero-fills a __Pyx_memviewslice struct
# (data pointer, memview object, shape/strides/suboffsets arrays).
memslice_entry_init = "{ 0, 0, { 0 }, { 0 }, { 0 } }"

# Names of the memoryview type and supporting C structs emitted per module.
memview_name = u'memoryview'
memview_typeptr_cname = '__pyx_memoryview_type'
memview_objstruct_cname = '__pyx_memoryview_obj'
memviewslice_cname = u'__Pyx_memviewslice'
|
||||
|
||||
|
||||
def put_init_entry(mv_cname, code):
    """Emit C code that null-initializes the memoryview slice `mv_cname`.

    Clearing both the data pointer and the owning memview object marks the
    slice as unset so later xdecref/initialized checks behave correctly.
    """
    for field in ("data", "memview"):
        code.putln("%s.%s = NULL;" % (mv_cname, field))
|
||||
|
||||
|
||||
#def axes_to_str(axes):
|
||||
# return "".join([access[0].upper()+packing[0] for (access, packing) in axes])
|
||||
|
||||
|
||||
def put_acquire_memoryviewslice(lhs_cname, lhs_type, lhs_pos, rhs, code,
                                have_gil=False, first_assignment=True):
    "We can avoid decreffing the lhs if we know it is the first assignment"
    # NOTE(review): lhs_pos is only referenced by the commented-out
    # unbound-check below — currently unused.
    assert rhs.type.is_memoryviewslice

    # A temp or simple RHS can be read repeatedly without re-evaluation;
    # anything else is evaluated once into an unmanaged temporary first.
    pretty_rhs = rhs.result_in_temp() or rhs.is_simple()
    if pretty_rhs:
        rhstmp = rhs.result()
    else:
        rhstmp = code.funcstate.allocate_temp(lhs_type, manage_ref=False)
        code.putln("%s = %s;" % (rhstmp, rhs.result_as(lhs_type)))

    # Allow uninitialized assignment
    #code.putln(code.put_error_if_unbound(lhs_pos, rhs.entry))
    put_assign_to_memviewslice(lhs_cname, rhs, rhstmp, lhs_type, code,
                               have_gil=have_gil, first_assignment=first_assignment)

    # Release the temporary only if we allocated one above.
    if not pretty_rhs:
        code.funcstate.release_temp(rhstmp)
|
||||
|
||||
|
||||
def put_assign_to_memviewslice(lhs_cname, rhs, rhs_cname, memviewslicetype, code,
                               have_gil=False, first_assignment=False):
    """Emit C code assigning slice `rhs_cname` to the slice `lhs_cname`.

    On reassignment the reference held by the previous LHS value is
    dropped first; a non-temp RHS is made to own its reference so the
    LHS keeps the underlying buffer alive.
    """
    if not first_assignment:
        code.put_xdecref_memoryviewslice(lhs_cname, have_gil=have_gil)
    if not rhs.result_in_temp():
        rhs.make_owned_memoryviewslice(code)
    code.putln("{0} = {1};".format(lhs_cname, rhs_cname))
|
||||
|
||||
|
||||
def get_buf_flags(specs):
    """Return the PyBUF_* flag expression needed to acquire a buffer
    matching the given list of (access, packing) axis specs."""
    is_c_contig, is_f_contig = is_cf_contig(specs)

    if is_c_contig:
        return memview_c_contiguous
    if is_f_contig:
        return memview_f_contiguous

    # Indirect access anywhere requires suboffsets, i.e. full access.
    accesses = [access for access, _packing in specs]
    if 'full' in accesses or 'ptr' in accesses:
        return memview_full_access
    return memview_strided_access
|
||||
|
||||
|
||||
def insert_newaxes(memoryviewtype, n):
    """Return a new slice type with `n` direct strided axes prepended."""
    new_axes = [('direct', 'strided')] * n + list(memoryviewtype.axes)
    return PyrexTypes.MemoryViewSliceType(memoryviewtype.dtype, new_axes)
|
||||
|
||||
|
||||
def broadcast_types(src, dst):
    """Pad the lower-dimensional of the two slice types with leading axes
    so both have the same ndim; returns the (src, dst) pair."""
    ndim_diff = abs(src.ndim - dst.ndim)
    if src.ndim < dst.ndim:
        return insert_newaxes(src, ndim_diff), dst
    return src, insert_newaxes(dst, ndim_diff)
|
||||
|
||||
|
||||
def valid_memslice_dtype(dtype, i=0):
    """
    Return whether type dtype can be used as the base type of a
    memoryview slice.

    We support structs, numeric types and objects.  `i` tracks array
    nesting depth; nested C arrays are accepted up to 8 levels deep.
    """
    # Integer-valued complex types have no buffer format.
    if dtype.is_complex and dtype.real_type.is_int:
        return False

    if dtype is PyrexTypes.c_bint_type:
        return False

    if dtype.is_struct and dtype.kind == 'struct':
        # A struct is valid iff every member is.
        return all(valid_memslice_dtype(member.type)
                   for member in dtype.scope.var_entries)

    if dtype.is_error:
        return True
    # Pointers are not valid (yet)
    # (dtype.is_ptr and valid_memslice_dtype(dtype.base_type)) or
    if dtype.is_array and i < 8 and valid_memslice_dtype(dtype.base_type, i + 1):
        return True
    if dtype.is_numeric or dtype.is_pyobject:
        return True
    if dtype.is_fused:  # accept this as it will be replaced by specializations later
        return True
    return bool(dtype.is_typedef and valid_memslice_dtype(dtype.typedef_base_type))
|
||||
|
||||
|
||||
class MemoryViewSliceBufferEntry(Buffer.BufferEntry):
    """
    May be used during code generation time to be queried for
    shape/strides/suboffsets attributes, or to perform indexing or slicing.
    """
    def __init__(self, entry):
        # Symbol-table entry for the memoryview slice variable.
        self.entry = entry
        self.type = entry.type
        self.cname = entry.cname

        # C expression for the slice's raw data pointer.
        self.buf_ptr = "%s.data" % self.cname

        dtype = self.entry.type.dtype
        # Pointer-to-dtype type used when casting the data pointer.
        self.buf_ptr_type = PyrexTypes.CPtrType(dtype)
        self.init_attributes()

    def get_buf_suboffsetvars(self):
        """C expressions for the per-dimension suboffsets."""
        return self._for_all_ndim("%s.suboffsets[%d]")

    def get_buf_stridevars(self):
        """C expressions for the per-dimension strides."""
        return self._for_all_ndim("%s.strides[%d]")

    def get_buf_shapevars(self):
        """C expressions for the per-dimension extents."""
        return self._for_all_ndim("%s.shape[%d]")

    def generate_buffer_lookup_code(self, code, index_cnames):
        """Return a C expression indexing this slice with one index per axis."""
        axes = [(dim, index_cnames[dim], access, packing)
                for dim, (access, packing) in enumerate(self.type.axes)]
        return self._generate_buffer_lookup_code(code, axes)

    def _generate_buffer_lookup_code(self, code, axes, cast_result=True):
        """
        Generate a single expression that indexes the memory view slice
        in each dimension.
        """
        bufp = self.buf_ptr
        type_decl = self.type.dtype.empty_declaration_code()

        # Fold one dimension of pointer arithmetic into bufp per iteration.
        for dim, index, access, packing in axes:
            # NOTE(review): `shape` is computed but never used below.
            shape = "%s.shape[%d]" % (self.cname, dim)
            stride = "%s.strides[%d]" % (self.cname, dim)
            suboffset = "%s.suboffsets[%d]" % (self.cname, dim)

            flag = get_memoryview_flag(access, packing)

            if flag in ("generic", "generic_contiguous"):
                # Note: we cannot do cast tricks to avoid stride multiplication
                # for generic_contiguous, as we may have to do (dtype *)
                # or (dtype **) arithmetic, we won't know which unless
                # we check suboffsets
                code.globalstate.use_utility_code(memviewslice_index_helpers)
                bufp = ('__pyx_memviewslice_index_full(%s, %s, %s, %s)' %
                        (bufp, index, stride, suboffset))

            elif flag == "indirect":
                # Strided step, then dereference through the suboffset.
                bufp = "(%s + %s * %s)" % (bufp, index, stride)
                bufp = ("(*((char **) %s) + %s)" % (bufp, suboffset))

            elif flag == "indirect_contiguous":
                # Note: we do char ** arithmetic
                bufp = "(*((char **) %s + %s) + %s)" % (bufp, index, suboffset)

            elif flag == "strided":
                bufp = "(%s + %s * %s)" % (bufp, index, stride)

            else:
                assert flag == 'contiguous', flag
                # Contiguous: plain typed pointer arithmetic, no stride needed.
                bufp = '((char *) (((%s *) %s) + %s))' % (type_decl, bufp, index)

            bufp = '( /* dim=%d */ %s )' % (dim, bufp)

        if cast_result:
            return "((%s *) %s)" % (type_decl, bufp)

        return bufp

    def generate_buffer_slice_code(self, code, indices, dst, have_gil,
                                   have_slices, directives):
        """
        Slice a memoryviewslice.

        indices - list of index nodes. If not a SliceNode, or NoneNode,
                  then it must be coercible to Py_ssize_t

        Simply call __pyx_memoryview_slice_memviewslice with the right
        arguments, unless the dimension is omitted or a bare ':', in which
        case we copy over the shape/strides/suboffsets attributes directly
        for that dimension.
        """
        src = self.cname

        # dst starts as a shallow copy of src sharing the same buffer.
        code.putln("%(dst)s.data = %(src)s.data;" % locals())
        code.putln("%(dst)s.memview = %(src)s.memview;" % locals())
        code.put_incref_memoryviewslice(dst)

        # NOTE(review): all_dimensions_direct appears unused in this method;
        # it may be referenced by the Tempita templates via locals() capture.
        all_dimensions_direct = all(access == 'direct' for access, packing in self.type.axes)
        suboffset_dim_temp = []

        def get_suboffset_dim():
            # create global temp variable at request
            if not suboffset_dim_temp:
                suboffset_dim = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
                code.putln("%s = -1;" % suboffset_dim)
                suboffset_dim_temp.append(suboffset_dim)
            return suboffset_dim_temp[0]

        dim = -1        # source dimension; newaxis entries don't consume one
        new_ndim = 0    # destination dimension written so far
        for index in indices:
            if index.is_none:
                # newaxis
                for attrib, value in [('shape', 1), ('strides', 0), ('suboffsets', -1)]:
                    code.putln("%s.%s[%d] = %d;" % (dst, attrib, new_ndim, value))

                new_ndim += 1
                continue

            dim += 1
            access, packing = self.type.axes[dim]

            if isinstance(index, ExprNodes.SliceNode):
                # slice, unspecified dimension, or part of ellipsis
                # d captures the local names used by the Tempita template.
                d = dict(locals())
                for s in "start stop step".split():
                    idx = getattr(index, s)
                    have_idx = d['have_' + s] = not idx.is_none
                    d[s] = idx.result() if have_idx else "0"

                if not (d['have_start'] or d['have_stop'] or d['have_step']):
                    # full slice (:), simply copy over the extent, stride
                    # and suboffset. Also update suboffset_dim if needed
                    d['access'] = access
                    util_name = "SimpleSlice"
                else:
                    util_name = "ToughSlice"
                    d['error_goto'] = code.error_goto(index.pos)

                new_ndim += 1
            else:
                # normal index
                idx = index.result()

                indirect = access != 'direct'
                if indirect:
                    generic = access == 'full'
                    if new_ndim != 0:
                        return error(index.pos,
                                     "All preceding dimensions must be "
                                     "indexed and not sliced")

                d = dict(
                    locals(),
                    wraparound=int(directives['wraparound']),
                    boundscheck=int(directives['boundscheck']),
                )
                if d['boundscheck']:
                    d['error_goto'] = code.error_goto(index.pos)
                util_name = "SliceIndex"

            _, impl = TempitaUtilityCode.load_as_string(util_name, "MemoryView_C.c", context=d)
            code.put(impl)

        # Release the suboffset temp if any template requested it.
        if suboffset_dim_temp:
            code.funcstate.release_temp(suboffset_dim_temp[0])
|
||||
|
||||
|
||||
def empty_slice(pos):
    """Return a SliceNode equivalent to a bare ':' at the given position."""
    none_node = ExprNodes.NoneNode(pos)
    return ExprNodes.SliceNode(
        pos, start=none_node, stop=none_node, step=none_node)
|
||||
|
||||
|
||||
def unellipsify(indices, ndim):
    """Expand an Ellipsis into explicit full slices and pad missing
    trailing dimensions with full slices.

    Returns (have_slices, result, newaxes): whether any slicing occurs,
    the expanded index list, and the newaxis (None) index nodes.
    """
    expanded = []
    seen_ellipsis = False
    have_slices = False

    # None entries introduce new axes and do not consume a source dimension.
    newaxes = [index for index in indices if index.is_none]
    n_indices = len(indices) - len(newaxes)

    for index in indices:
        if isinstance(index, ExprNodes.EllipsisNode):
            have_slices = True
            full_slice = empty_slice(index.pos)
            if seen_ellipsis:
                # Only the first ellipsis expands; later ones act as one ':'.
                expanded.append(full_slice)
            else:
                expanded.extend([full_slice] * (ndim - n_indices + 1))
                seen_ellipsis = True
        else:
            if index.is_slice or index.is_none:
                have_slices = True
            expanded.append(index)

    # Pad unmentioned trailing dimensions with full slices.
    consumed = len(expanded) - len(newaxes)
    if consumed < ndim:
        have_slices = True
        expanded.extend([empty_slice(indices[-1].pos)] * (ndim - consumed))

    return have_slices, expanded, newaxes
|
||||
|
||||
|
||||
def get_memoryview_flag(access, packing):
    """Map an (access, packing) axis spec to the lookup-strategy name used
    by the buffer indexing code generator."""
    if packing == 'contig':
        if access == 'full':
            return 'generic_contiguous'
        if access == 'ptr':
            return 'indirect_contiguous'
        assert access == 'direct', (access, packing)
        return 'contiguous'

    assert packing in ('strided', 'follow'), (access, packing)
    if access == 'full':
        return 'generic'
    if access == 'ptr':
        return 'indirect'
    assert access == 'direct', (access, packing)
    return 'strided'
|
||||
|
||||
|
||||
def get_is_contig_func_name(contig_type, ndim):
    """Name of the generated contiguity-check function ('C' or 'F')."""
    assert contig_type in ('C', 'F')
    return "__pyx_memviewslice_is_contig_{0}{1:d}".format(contig_type, ndim)
|
||||
|
||||
|
||||
def get_is_contig_utility(contig_type, ndim):
    """Load the C utility implementing the contiguity check for the given
    contiguity kind ('C' or 'F') and dimensionality."""
    assert contig_type in ('C', 'F')
    ctx = dict(context, ndim=ndim, contig_type=contig_type)
    return load_memview_c_utility("MemviewSliceCheckContig", ctx,
                                  requires=[is_contig_utility])
|
||||
|
||||
|
||||
def slice_iter(slice_type, slice_result, ndim, code):
    """Return the loop generator appropriate for the slice's layout:
    a flat single loop for contiguous slices, nested strided loops otherwise."""
    contiguous = slice_type.is_c_contig or slice_type.is_f_contig
    iter_cls = ContigSliceIter if contiguous else StridedSliceIter
    return iter_cls(slice_type, slice_result, ndim, code)
|
||||
|
||||
|
||||
class SliceIter(object):
    """Base class for emitting C loops over every element of a slice.

    Subclasses implement start_loops()/end_loops() to open and close the
    generated loop nest.
    """

    def __init__(self, slice_type, slice_result, ndim, code):
        # Memoryview slice type being iterated.
        self.slice_type = slice_type
        # C expression naming the slice struct.
        self.slice_result = slice_result
        # Number of dimensions to iterate.
        self.ndim = ndim
        # Code writer the loops are emitted into.
        self.code = code
|
||||
|
||||
|
||||
class ContigSliceIter(SliceIter):
    """Emits a single flat C loop over a contiguous slice's elements."""

    def start_loops(self):
        """Open the loop; returns the C name of the running element pointer."""
        code = self.code
        code.begin_block()

        type_decl = self.slice_type.dtype.empty_declaration_code()

        # Total element count is the product of all extents.
        total_size = ' * '.join("%s.shape[%d]" % (self.slice_result, i)
                                for i in range(self.ndim))
        code.putln("Py_ssize_t __pyx_temp_extent = %s;" % total_size)
        code.putln("Py_ssize_t __pyx_temp_idx;")
        code.putln("%s *__pyx_temp_pointer = (%s *) %s.data;" % (
            type_decl, type_decl, self.slice_result))
        code.putln("for (__pyx_temp_idx = 0; "
                   "__pyx_temp_idx < __pyx_temp_extent; "
                   "__pyx_temp_idx++) {")

        return "__pyx_temp_pointer"

    def end_loops(self):
        """Close the loop opened by start_loops()."""
        # Advance the typed pointer by one element per iteration.
        self.code.putln("__pyx_temp_pointer += 1;")
        self.code.putln("}")
        self.code.end_block()
|
||||
|
||||
|
||||
class StridedSliceIter(SliceIter):
    """Emits one nested C loop per dimension, advancing by strides."""

    def start_loops(self):
        """Open the loop nest; returns the innermost element pointer name."""
        code = self.code
        code.begin_block()

        # Per-dimension extent/stride locals plus a char* cursor and index.
        for i in range(self.ndim):
            t = i, self.slice_result, i
            code.putln("Py_ssize_t __pyx_temp_extent_%d = %s.shape[%d];" % t)
            code.putln("Py_ssize_t __pyx_temp_stride_%d = %s.strides[%d];" % t)
            code.putln("char *__pyx_temp_pointer_%d;" % i)
            code.putln("Py_ssize_t __pyx_temp_idx_%d;" % i)

        code.putln("__pyx_temp_pointer_0 = %s.data;" % self.slice_result)

        # Each inner cursor starts where the enclosing dimension's cursor is.
        for i in range(self.ndim):
            if i > 0:
                code.putln("__pyx_temp_pointer_%d = __pyx_temp_pointer_%d;" % (i, i - 1))

            code.putln("for (__pyx_temp_idx_%d = 0; "
                       "__pyx_temp_idx_%d < __pyx_temp_extent_%d; "
                       "__pyx_temp_idx_%d++) {" % (i, i, i, i))

        return "__pyx_temp_pointer_%d" % (self.ndim - 1)

    def end_loops(self):
        """Close the loop nest, advancing each cursor by its stride."""
        code = self.code
        # Close innermost-to-outermost so the strides apply per level.
        for i in range(self.ndim - 1, -1, -1):
            code.putln("__pyx_temp_pointer_%d += __pyx_temp_stride_%d;" % (i, i))
            code.putln("}")

        code.end_block()
|
||||
|
||||
|
||||
def copy_c_or_fortran_cname(memview):
    """Return the cname of the generated copy function for this slice type,
    suffixed with 'c' or 'f' by its contiguity."""
    layout = 'c' if memview.is_c_contig else 'f'
    return "__pyx_memoryview_copy_slice_%s_%s" % (
        memview.specialization_suffix(), layout)
|
||||
|
||||
|
||||
def get_copy_new_utility(pos, from_memview, to_memview):
    """Load the C utility implementing a copy from `from_memview` into a
    newly allocated `to_memview`-typed slice.

    Reports a CompileError (via error()) and returns None when the copy is
    unsupported: mismatched dtypes, mismatched ndim, non-contiguous target,
    or indirect source access.
    """
    # dtypes must match exactly, except that a const source may be copied
    # into a non-const destination of the same underlying type.
    if (from_memview.dtype != to_memview.dtype and
            not (from_memview.dtype.is_const and
                 from_memview.dtype.const_base_type == to_memview.dtype)):
        error(pos, "dtypes must be the same!")
        return
    if len(from_memview.axes) != len(to_memview.axes):
        error(pos, "number of dimensions must be same")
        return
    if not (to_memview.is_c_contig or to_memview.is_f_contig):
        error(pos, "to_memview must be c or f contiguous.")
        return

    # The copy template only supports direct source access.
    for access, _packing in from_memview.axes:
        if access != 'direct':
            error(pos, "cannot handle 'full' or 'ptr' access at this time.")
            return

    if to_memview.is_c_contig:
        mode, contig_flag = 'c', memview_c_contiguous
    else:
        # Guaranteed f-contiguous by the check above.
        mode, contig_flag = 'fortran', memview_f_contiguous

    return load_memview_c_utility(
        "CopyContentsUtility",
        context=dict(
            context,
            mode=mode,
            dtype_decl=to_memview.dtype.empty_declaration_code(),
            contig_flag=contig_flag,
            ndim=to_memview.ndim,
            func_cname=copy_c_or_fortran_cname(to_memview),
            dtype_is_object=int(to_memview.dtype.is_pyobject)),
        requires=[copy_contents_new_utility])
|
||||
|
||||
|
||||
def get_axes_specs(env, axes):
    '''
    get_axes_specs(env, axes) -> list of (access, packing) specs for each axis.
    access is one of 'full', 'ptr' or 'direct'
    packing is one of 'contig', 'strided' or 'follow'
    '''

    cythonscope = env.global_scope().context.cython_scope
    cythonscope.load_cythonscope()
    viewscope = cythonscope.viewscope

    # NOTE(review): these two lookups appear unused below — presumably kept
    # to force the entries into existence; confirm before removing.
    access_specs = tuple([viewscope.lookup(name)
                          for name in ('full', 'direct', 'ptr')])
    packing_specs = tuple([viewscope.lookup(name)
                           for name in ('contig', 'strided', 'follow')])

    is_f_contig, is_c_contig = False, False
    default_access, default_packing = 'direct', 'strided'
    cf_access, cf_packing = default_access, 'follow'

    axes_specs = []
    # analyse all axes.
    for idx, axis in enumerate(axes):
        # Only the 'step' slot may carry a specifier: int[:, ::spec].
        if not axis.start.is_none:
            raise CompileError(axis.start.pos, START_ERR)

        if not axis.stop.is_none:
            raise CompileError(axis.stop.pos, STOP_ERR)

        if axis.step.is_none:
            axes_specs.append((default_access, default_packing))

        elif isinstance(axis.step, IntNode):
            # the packing for the ::1 axis is contiguous,
            # all others are cf_packing.
            if axis.step.compile_time_value(env) != 1:
                raise CompileError(axis.step.pos, STEP_ERR)

            # 'cfcontig' is a placeholder resolved to 'contig' below.
            axes_specs.append((cf_access, 'cfcontig'))

        elif isinstance(axis.step, (NameNode, AttributeNode)):
            # A cython.view constant such as 'generic' or 'indirect'.
            entry = _get_resolved_spec(env, axis.step)
            if entry.name in view_constant_to_access_packing:
                axes_specs.append(view_constant_to_access_packing[entry.name])
            else:
                raise CompileError(axis.step.pos, INVALID_ERR)

        else:
            raise CompileError(axis.step.pos, INVALID_ERR)

    # First, find out if we have a ::1 somewhere
    contig_dim = 0
    is_contig = False
    for idx, (access, packing) in enumerate(axes_specs):
        if packing == 'cfcontig':
            if is_contig:
                # NOTE(review): 'axis' here is the leaked loop variable from
                # the previous loop (always the *last* axis), so this error
                # points at the last axis's position, not the duplicate ::1.
                raise CompileError(axis.step.pos, BOTH_CF_ERR)

            contig_dim = idx
            axes_specs[idx] = (access, 'contig')
            is_contig = True

    if is_contig:
        # We have a ::1 somewhere, see if we're C or Fortran contiguous
        if contig_dim == len(axes) - 1:
            is_c_contig = True
        else:
            is_f_contig = True

            if contig_dim and not axes_specs[contig_dim - 1][0] in ('full', 'ptr'):
                raise CompileError(axes[contig_dim].pos,
                                   "Fortran contiguous specifier must follow an indirect dimension")

        if is_c_contig:
            # Contiguous in the last dimension, find the last indirect dimension
            contig_dim = -1
            for idx, (access, packing) in enumerate(reversed(axes_specs)):
                if access in ('ptr', 'full'):
                    contig_dim = len(axes) - idx - 1

        # Replace 'strided' with 'follow' for any dimension following the last
        # indirect dimension, the first dimension or the dimension following
        # the ::1.
        #               int[::indirect, ::1, :, :]
        #                                    ^  ^
        #               int[::indirect, :, :, ::1]
        #                               ^  ^
        start = contig_dim + 1
        stop = len(axes) - is_c_contig
        for idx, (access, packing) in enumerate(axes_specs[start:stop]):
            # Rebase idx from the slice back onto the full axes list.
            idx = contig_dim + 1 + idx
            if access != 'direct':
                raise CompileError(axes[idx].pos,
                                   "Indirect dimension may not follow "
                                   "Fortran contiguous dimension")
            if packing == 'contig':
                raise CompileError(axes[idx].pos,
                                   "Dimension may not be contiguous")
            axes_specs[idx] = (access, cf_packing)

        if is_c_contig:
            # For C contiguity, we need to fix the 'contig' dimension
            # after the loop
            a, p = axes_specs[-1]
            axes_specs[-1] = a, 'contig'

    validate_axes_specs([axis.start.pos for axis in axes],
                        axes_specs,
                        is_c_contig,
                        is_f_contig)

    return axes_specs
|
||||
|
||||
|
||||
def validate_axes(pos, axes):
    """Report an error and return False when the axis count reaches the
    buffer dimension limit; return True otherwise."""
    if len(axes) >= Options.buffer_max_dims:
        error(pos, "More dimensions than the maximum number"
                   " of buffer dimensions were used.")
        return False
    return True
|
||||
|
||||
|
||||
def is_cf_contig(specs):
    """Return (is_c_contig, is_f_contig) for a list of (access, packing)
    axis specs.  A single contiguous axis counts as C contiguous."""
    if len(specs) == 1 and specs == [('direct', 'contig')]:
        return True, False

    if (specs[-1] == ('direct', 'contig') and
            all(axis == ('direct', 'follow') for axis in specs[:-1])):
        # c_contiguous: 'follow', 'follow', ..., 'follow', 'contig'
        return True, False

    if (len(specs) > 1 and
            specs[0] == ('direct', 'contig') and
            all(axis == ('direct', 'follow') for axis in specs[1:])):
        # f_contiguous: 'contig', 'follow', 'follow', ..., 'follow'
        return False, True

    return False, False
|
||||
|
||||
|
||||
def get_mode(specs):
    """Classify an axis-spec list as 'c', 'fortran', 'full' or 'strided'."""
    is_c_contig, is_f_contig = is_cf_contig(specs)

    if is_c_contig:
        return 'c'
    if is_f_contig:
        return 'fortran'

    # Any indirect access forces full buffer acquisition.
    if any(access in ('ptr', 'full') for access, _packing in specs):
        return 'full'
    return 'strided'
|
||||
|
||||
# Map cython.view constant names (usable in axis 'step' slots) to their
# (access, packing) spec pairs; consulted by get_axes_specs().
view_constant_to_access_packing = {
    'generic': ('full', 'strided'),
    'strided': ('direct', 'strided'),
    'indirect': ('ptr', 'strided'),
    'generic_contiguous': ('full', 'contig'),
    'contiguous': ('direct', 'contig'),
    'indirect_contiguous': ('ptr', 'contig'),
}
|
||||
|
||||
def validate_axes_specs(positions, specs, is_c_contig, is_f_contig):
    """Raise CompileError if the (access, packing) spec list is invalid.

    positions gives the source position for each axis, used in errors.
    is_c_contig/is_f_contig are precomputed by the caller (get_axes_specs).
    """

    packing_specs = ('contig', 'strided', 'follow')
    access_specs = ('direct', 'ptr', 'full')

    # is_c_contig, is_f_contig = is_cf_contig(specs)

    # NOTE(review): has_follow and has_generic_contig are assigned but
    # never read — confirm they are dead before removing.
    has_contig = has_follow = has_strided = has_generic_contig = False

    last_indirect_dimension = -1
    for idx, (access, packing) in enumerate(specs):
        if access == 'ptr':
            last_indirect_dimension = idx

    for idx, (pos, (access, packing)) in enumerate(zip(positions, specs)):

        if not (access in access_specs and
                packing in packing_specs):
            raise CompileError(pos, "Invalid axes specification.")

        if packing == 'strided':
            has_strided = True
        elif packing == 'contig':
            if has_contig:
                raise CompileError(pos, "Only one direct contiguous "
                                        "axis may be specified.")

            # A direct contiguous axis may only appear right after the last
            # indirect dimension or as the final dimension.
            valid_contig_dims = last_indirect_dimension + 1, len(specs) - 1
            if idx not in valid_contig_dims and access != 'ptr':
                if last_indirect_dimension + 1 != len(specs) - 1:
                    dims = "dimensions %d and %d" % valid_contig_dims
                else:
                    dims = "dimension %d" % valid_contig_dims[0]

                raise CompileError(pos, "Only %s may be contiguous and direct" % dims)

            has_contig = access != 'ptr'
        elif packing == 'follow':
            if has_strided:
                raise CompileError(pos, "A memoryview cannot have both follow and strided axis specifiers.")
            if not (is_c_contig or is_f_contig):
                raise CompileError(pos, "Invalid use of the follow specifier.")

        # An indirect dimension resets the strided tracking: axes after it
        # belong to a fresh (sub-)array layout.
        if access in ('ptr', 'full'):
            has_strided = False
|
||||
|
||||
def _get_resolved_spec(env, spec):
    """Resolve an axis specifier node to its cython.view scope entry."""
    # spec must be a NameNode or an AttributeNode
    if isinstance(spec, NameNode):
        return _resolve_NameNode(env, spec)
    if isinstance(spec, AttributeNode):
        return _resolve_AttributeNode(env, spec)
    raise CompileError(spec.pos, INVALID_ERR)
|
||||
|
||||
def _resolve_NameNode(env, node):
    """Resolve a bare name against the cython.view scope; raise
    CompileError if it is unknown or was not cimported from cython.view."""
    try:
        # AttributeError here means env.lookup() found nothing.
        resolved_name = env.lookup(node.name).name
    except AttributeError:
        raise CompileError(node.pos, INVALID_ERR)

    view_scope = env.global_scope().context.cython_scope.viewscope
    entry = view_scope.lookup(resolved_name)
    if entry is None:
        raise CompileError(node.pos, NOT_CIMPORTED_ERR)
    return entry
|
||||
|
||||
def _resolve_AttributeNode(env, node):
    """Resolve a dotted name (e.g. cython.view.contiguous) to its entry by
    walking the module scopes along the attribute path."""
    names = []
    while isinstance(node, AttributeNode):
        names.append(node.attribute)
        node = node.obj
    if not isinstance(node, NameNode):
        raise CompileError(node.pos, EXPR_ERR)
    names.append(node.name)
    names.reverse()

    modnames = names[:-1]
    # must be at least 1 module name, o/w not an AttributeNode.
    assert modnames

    scope = env
    for modname in modnames:
        mod = scope.lookup(modname)
        if not mod or not mod.as_module:
            raise CompileError(
                node.pos, "undeclared name not builtin: %s" % modname)
        scope = mod.as_module

    entry = scope.lookup(names[-1])
    if not entry:
        raise CompileError(node.pos, "No such attribute '%s'" % names[-1])

    return entry
|
||||
|
||||
#
|
||||
### Utility loading
|
||||
#
|
||||
|
||||
def load_memview_cy_utility(util_code_name, context=None, **kwargs):
    """Load a Cython-level utility from MemoryView.pyx."""
    return CythonUtilityCode.load(
        util_code_name, "MemoryView.pyx", context=context, **kwargs)
|
||||
|
||||
def load_memview_c_utility(util_code_name, context=None, **kwargs):
    """Load a C-level utility from MemoryView_C.c; with a context it is
    loaded as a Tempita template."""
    if context is None:
        return UtilityCode.load(util_code_name, "MemoryView_C.c", **kwargs)
    return TempitaUtilityCode.load(util_code_name, "MemoryView_C.c",
                                   context=context, **kwargs)
|
||||
|
||||
def use_cython_array_utility_code(env):
    """Mark the cython.view array wrapper as used so its support code is
    emitted into the module."""
    cython_scope = env.global_scope().context.cython_scope
    cython_scope.load_cythonscope()
    entry = cython_scope.viewscope.lookup('array_cwrapper')
    entry.used = True
|
||||
|
||||
# Template context shared by all memoryview utility code below.
context = {
    'memview_struct_name': memview_objstruct_cname,
    'max_dims': Options.buffer_max_dims,
    'memviewslice_name': memviewslice_cname,
    'memslice_init': memslice_entry_init,
}
# Declaration of the __Pyx_memviewslice struct.
memviewslice_declare_code = load_memview_c_utility(
    "MemviewSliceStruct",
    context=context,
    requires=[])

# Atomic refcounting helpers used by slice acquire/release.
atomic_utility = load_memview_c_utility("Atomics", context)

# Slice init/acquire/release runtime support.
memviewslice_init_code = load_memview_c_utility(
    "MemviewSliceInit",
    context=dict(context, BUF_MAX_NDIMS=Options.buffer_max_dims),
    requires=[memviewslice_declare_code,
              atomic_utility],
)

# Helper for fully-generic (suboffset-aware) indexing.
memviewslice_index_helpers = load_memview_c_utility("MemviewSliceIndex")

typeinfo_to_format_code = load_memview_cy_utility(
    "BufferFormatFromTypeInfo", requires=[Buffer._typeinfo_to_format_code])

is_contig_utility = load_memview_c_utility("MemviewSliceIsContig", context)
overlapping_utility = load_memview_c_utility("OverlappingSlices", context)
copy_contents_new_utility = load_memview_c_utility(
    "MemviewSliceCopyTemplate",
    context,
    requires=[],  # require cython_array_utility_code
)

# The main cython.view implementation (memoryview type, array wrapper, ...).
view_utility_code = load_memview_cy_utility(
    "View.MemoryView",
    context=context,
    requires=[Buffer.GetAndReleaseBufferUtilityCode(),
              Buffer.buffer_struct_declare_code,
              Buffer.buffer_formats_declare_code,
              memviewslice_init_code,
              is_contig_utility,
              overlapping_utility,
              copy_contents_new_utility,
              ModuleNode.capsule_utility_code],
)
# Names from View.MemoryView that may be looked up from user code.
view_utility_whitelist = ('array', 'memoryview', 'array_cwrapper',
                          'generic', 'strided', 'indirect', 'contiguous',
                          'indirect_contiguous')

# Wire up the circular dependencies after both sides exist.
memviewslice_declare_code.requires.append(view_utility_code)
copy_contents_new_utility.requires.append(view_utility_code)
|
3216
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/ModuleNode.py
Normal file
3216
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/ModuleNode.py
Normal file
File diff suppressed because it is too large
Load Diff
162
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Naming.py
Normal file
162
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Naming.py
Normal file
@ -0,0 +1,162 @@
|
||||
#
# C naming conventions
#
#
# Prefixes for generating C names.
# Collected here to facilitate ensuring uniqueness.
#

# Every Cython-generated C identifier starts with this prefix.
pyrex_prefix = "__pyx_"


# Prefix for temporaries created by the code writer.
codewriter_temp_prefix = pyrex_prefix + "t_"

# Prefix for user-visible temporary variables (distinct from pyrex_prefix
# so temps never collide with generated names).
temp_prefix = u"__cyt_"

# --- Name prefixes, grouped here to guarantee uniqueness ---
builtin_prefix = pyrex_prefix + "builtin_"
arg_prefix = pyrex_prefix + "arg_"
funcdoc_prefix = pyrex_prefix + "doc_"
enum_prefix = pyrex_prefix + "e_"
func_prefix = pyrex_prefix + "f_"
func_prefix_api = pyrex_prefix + "api_f_"
pyfunc_prefix = pyrex_prefix + "pf_"
pywrap_prefix = pyrex_prefix + "pw_"
genbody_prefix = pyrex_prefix + "gb_"
gstab_prefix = pyrex_prefix + "getsets_"
prop_get_prefix = pyrex_prefix + "getprop_"
const_prefix = pyrex_prefix + "k_"
py_const_prefix = pyrex_prefix + "kp_"
label_prefix = pyrex_prefix + "L"
pymethdef_prefix = pyrex_prefix + "mdef_"
method_wrapper_prefix = pyrex_prefix + "specialmethod_"
methtab_prefix = pyrex_prefix + "methods_"
memtab_prefix = pyrex_prefix + "members_"
objstruct_prefix = pyrex_prefix + "obj_"
typeptr_prefix = pyrex_prefix + "ptype_"
prop_set_prefix = pyrex_prefix + "setprop_"
type_prefix = pyrex_prefix + "t_"
typeobj_prefix = pyrex_prefix + "type_"
var_prefix = pyrex_prefix + "v_"
varptr_prefix = pyrex_prefix + "vp_"
varptr_prefix_api = pyrex_prefix + "api_vp_"
wrapperbase_prefix= pyrex_prefix + "wrapperbase_"
pybuffernd_prefix = pyrex_prefix + "pybuffernd_"
pybufferstruct_prefix = pyrex_prefix + "pybuffer_"
vtable_prefix = pyrex_prefix + "vtable_"
vtabptr_prefix = pyrex_prefix + "vtabptr_"
vtabstruct_prefix = pyrex_prefix + "vtabstruct_"
opt_arg_prefix = pyrex_prefix + "opt_args_"
convert_func_prefix = pyrex_prefix + "convert_"
closure_scope_prefix = pyrex_prefix + "scope_"
closure_class_prefix = pyrex_prefix + "scope_struct_"
lambda_func_prefix = pyrex_prefix + "lambda_"
module_is_main = pyrex_prefix + "module_is_main_"
defaults_struct_prefix = pyrex_prefix + "defaults"
dynamic_args_cname = pyrex_prefix + "dynamic_args"

# Prefixes for interned constants, keyed by the kind of value interned.
interned_prefixes = {
    'str': pyrex_prefix + "n_",
    'int': pyrex_prefix + "int_",
    'float': pyrex_prefix + "float_",
    'tuple': pyrex_prefix + "tuple_",
    'codeobj': pyrex_prefix + "codeobj_",
    'slice': pyrex_prefix + "slice_",
    'ustring': pyrex_prefix + "ustring_",
    'umethod': pyrex_prefix + "umethod_",
}

ctuple_type_prefix = pyrex_prefix + "ctuple_"
# --- Well-known single C names used throughout the generated code ---
args_cname = pyrex_prefix + "args"
generator_cname = pyrex_prefix + "generator"
sent_value_cname = pyrex_prefix + "sent_value"
pykwdlist_cname = pyrex_prefix + "pyargnames"
obj_base_cname = pyrex_prefix + "base"
builtins_cname = pyrex_prefix + "b"
preimport_cname = pyrex_prefix + "i"
moddict_cname = pyrex_prefix + "d"
dummy_cname = pyrex_prefix + "dummy"
filename_cname = pyrex_prefix + "filename"
modulename_cname = pyrex_prefix + "modulename"
filetable_cname = pyrex_prefix + "f"
intern_tab_cname = pyrex_prefix + "intern_tab"
kwds_cname = pyrex_prefix + "kwds"
lineno_cname = pyrex_prefix + "lineno"
clineno_cname = pyrex_prefix + "clineno"
cfilenm_cname = pyrex_prefix + "cfilenm"
local_tstate_cname = pyrex_prefix + "tstate"
module_cname = pyrex_prefix + "m"
moddoc_cname = pyrex_prefix + "mdoc"
methtable_cname = pyrex_prefix + "methods"
retval_cname = pyrex_prefix + "r"
reqd_kwds_cname = pyrex_prefix + "reqd_kwds"
self_cname = pyrex_prefix + "self"
stringtab_cname = pyrex_prefix + "string_tab"
vtabslot_cname = pyrex_prefix + "vtab"
c_api_tab_cname = pyrex_prefix + "c_api_tab"
gilstate_cname = pyrex_prefix + "state"
skip_dispatch_cname = pyrex_prefix + "skip_dispatch"
empty_tuple = pyrex_prefix + "empty_tuple"
empty_bytes = pyrex_prefix + "empty_bytes"
empty_unicode = pyrex_prefix + "empty_unicode"
print_function = pyrex_prefix + "print"
print_function_kwargs = pyrex_prefix + "print_kwargs"
cleanup_cname = pyrex_prefix + "module_cleanup"
pymoduledef_cname = pyrex_prefix + "moduledef"
pymoduledef_slots_cname = pyrex_prefix + "moduledef_slots"
pymodinit_module_arg = pyrex_prefix + "pyinit_module"
pymodule_create_func_cname = pyrex_prefix + "pymod_create"
pymodule_exec_func_cname = pyrex_prefix + "pymod_exec"
optional_args_cname = pyrex_prefix + "optional_args"
import_star = pyrex_prefix + "import_star"
import_star_set = pyrex_prefix + "import_star_set"
outer_scope_cname= pyrex_prefix + "outer_scope"
cur_scope_cname = pyrex_prefix + "cur_scope"
enc_scope_cname = pyrex_prefix + "enc_scope"
frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
type_dict_guard_temp = pyrex_prefix + "type_dict_guard"
cython_runtime_cname = pyrex_prefix + "cython_runtime"

# Helpers of the shared code-object cache.
global_code_object_cache_find = pyrex_prefix + 'find_code_object'
global_code_object_cache_insert = pyrex_prefix + 'insert_code_object'

# Unprefixed identifiers used inside generated scopes.
genexpr_id_ref = 'genexpr'
freelist_name = 'freelist'
freecount_name = 'freecount'

# Standard C preprocessor macros used for error reporting.
line_c_macro = "__LINE__"

file_c_macro = "__FILE__"

extern_c_macro = pyrex_prefix.upper() + "EXTERN_C"

# --- Exception handling state variables ---
exc_type_name = pyrex_prefix + "exc_type"
exc_value_name = pyrex_prefix + "exc_value"
exc_tb_name = pyrex_prefix + "exc_tb"
exc_lineno_name = pyrex_prefix + "exc_lineno"

# --- Per-thread state for cython.parallel (prange/parallel) blocks ---
parallel_exc_type = pyrex_prefix + "parallel_exc_type"
parallel_exc_value = pyrex_prefix + "parallel_exc_value"
parallel_exc_tb = pyrex_prefix + "parallel_exc_tb"
parallel_filename = pyrex_prefix + "parallel_filename"
parallel_lineno = pyrex_prefix + "parallel_lineno"
parallel_clineno = pyrex_prefix + "parallel_clineno"
parallel_why = pyrex_prefix + "parallel_why"

# The (type, value, traceback) triple as a convenience tuple.
exc_vars = (exc_type_name, exc_value_name, exc_tb_name)

api_name = pyrex_prefix + "capi__"

# --- Header-guard macro prefixes for generated .h / API headers ---
h_guard_prefix = "__PYX_HAVE__"
api_guard_prefix = "__PYX_HAVE_API__"
api_func_guard = "__PYX_HAVE_API_FUNC_"

# C expression producing a NaN, defined in the generated utility code.
PYX_NAN = "__PYX_NAN()"
|
||||
|
||||
def py_version_hex(major, minor=0, micro=0, release_level=0, release_serial=0):
    """Pack version components into a single PY_VERSION_HEX-style integer.

    Layout (most to least significant): major (8 bits), minor (8 bits),
    micro (8 bits), release level (4 bits), release serial (4 bits) —
    matching CPython's ``sys.hexversion`` encoding.
    """
    packed = 0
    for component, shift in ((major, 24), (minor, 16), (micro, 8),
                             (release_level, 4), (release_serial, 0)):
        packed |= component << shift
    return packed
|
9450
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Nodes.py
Normal file
9450
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Nodes.py
Normal file
File diff suppressed because it is too large
Load Diff
4857
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Optimize.py
Normal file
4857
kivy_venv/lib/python3.11/site-packages/Cython/Compiler/Optimize.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,552 @@
|
||||
#
|
||||
# Cython - Compilation-wide options and pragma declarations
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
|
||||
class ShouldBeFromDirective(object):
    """Placeholder for a module-level option that must now be set through
    a compiler directive instead.

    Any attempt to use the placeholder as a boolean or integer raises a
    RuntimeError pointing at the replacement directive.  Every instance
    registers itself in the class-level ``known_directives`` list so
    ``get_directive_defaults()`` can migrate legacy assignments.
    """

    # Shared registry of all placeholder instances (class attribute).
    known_directives = []

    def __init__(self, options_name, directive_name=None, disallow=False):
        self.options_name = options_name
        if directive_name:
            self.directive_name = directive_name
        else:
            # Directive keeps the old option name by default.
            self.directive_name = options_name
        self.disallow = disallow
        type(self).known_directives.append(self)

    def __nonzero__(self):
        # Py2 truth-value hook: using the placeholder as a flag is an error.
        self._bad_access()

    def __int__(self):
        # Same for integer conversion.
        self._bad_access()

    def _bad_access(self):
        raise RuntimeError(repr(self))

    def __repr__(self):
        message = "Illegal access of '%s' from Options module rather than directive '%s'"
        return message % (self.options_name, self.directive_name)
|
||||
|
||||
|
||||
"""
|
||||
The members of this module are documented using autodata in
|
||||
Cython/docs/src/reference/compilation.rst.
|
||||
See http://www.sphinx-doc.org/en/master/ext/autodoc.html#directive-autoattribute
|
||||
for how autodata works.
|
||||
Descriptions of those members should start with a #:
|
||||
Don't forget to keep the docs in sync by removing and adding
|
||||
the members in both this file and the .rst file.
|
||||
"""
|
||||
|
||||
#: Whether or not to include docstring in the Python extension. If False, the binary size
#: will be smaller, but the ``__doc__`` attribute of any class or function will be an
#: empty string.
docstrings = True

#: Embed the source code position in the docstrings of functions and classes.
embed_pos_in_docstring = False

#: Copy the original source code line by line into C code comments
#: in the generated code file to help with understanding the output.
#: This is also required for coverage analysis.
emit_code_comments = True

# undocumented
pre_import = None

#: Decref global variables in each module on exit for garbage collection.
#: 0: None, 1+: interned objects, 2+: cdef globals, 3+: types objects
#: Mostly for reducing noise in Valgrind as it typically executes at process exit
#: (when all memory will be reclaimed anyways).
#: Note that directly or indirectly executed cleanup code that makes use of global
#: variables or types may no longer be safe when enabling the respective level since
#: there is no guaranteed order in which the (reference counted) objects will
#: be cleaned up. The order can change due to live references and reference cycles.
generate_cleanup_code = False

#: Should tp_clear() set object fields to None instead of clearing them to NULL?
clear_to_none = True

#: Generate an annotated HTML version of the input source files for debugging and optimisation purposes.
#: This has the same effect as the ``annotate`` argument in :func:`cythonize`.
annotate = False

# When annotating source files in HTML, include coverage information from
# this file.
annotate_coverage_xml = None

#: This will abort the compilation on the first error occurred rather than trying
#: to keep going and printing further error messages.
fast_fail = False

#: Turn all warnings into errors.
warning_errors = False

#: Make unknown names an error. Python raises a NameError when
#: encountering unknown names at runtime, whereas this option makes
#: them a compile time error. If you want full Python compatibility,
#: you should disable this option and also 'cache_builtins'.
error_on_unknown_names = True

#: Make uninitialized local variable reference a compile time error.
#: Python raises UnboundLocalError at runtime, whereas this option makes
#: them a compile time error. Note that this option affects only variables
#: of "python object" type.
error_on_uninitialized = True

#: This will convert statements of the form ``for i in range(...)``
#: to ``for i from ...`` when ``i`` is a C integer type, and the direction
#: (i.e. sign of step) can be determined.
#: WARNING: This may change the semantics if the range causes assignment to
#: i to overflow. Specifically, if this option is set, an error will be
#: raised before the loop is entered, whereas without this option the loop
#: will execute until an overflowing value is encountered.
convert_range = True

#: Perform lookups on builtin names only once, at module initialisation
#: time. This will prevent the module from getting imported if a
#: builtin name that it uses cannot be found during initialisation.
#: Default is True.
#: Note that some legacy builtins are automatically remapped
#: from their Python 2 names to their Python 3 names by Cython
#: when building in Python 3.x,
#: so that they do not get in the way even if this option is enabled.
cache_builtins = True

#: Generate branch prediction hints to speed up error handling etc.
gcc_branch_hints = True

#: Enable this to allow one to write ``your_module.foo = ...`` to overwrite the
#: definition of the cpdef function foo, at the cost of an extra dictionary
#: lookup on every call.
#: If this is false it generates only the Python wrapper and no override check.
lookup_module_cpdef = False

#: Whether or not to embed the Python interpreter, for use in making a
#: standalone executable or calling from external libraries.
#: This will provide a C function which initialises the interpreter and
#: executes the body of this module.
#: See `this demo <https://github.com/cython/cython/tree/master/Demos/embed>`_
#: for a concrete example.
#: If true, the initialisation function is the C main() function, but
#: this option can also be set to a non-empty string to provide a function name explicitly.
#: Default is False.
embed = None

# In previous iterations of Cython, globals() gave the first non-Cython module
# globals in the call stack. Sage relies on this behavior for variable injection.
old_style_globals = ShouldBeFromDirective('old_style_globals')

#: Allows cimporting from a pyx file without a pxd file.
cimport_from_pyx = False

#: Maximum number of dimensions for buffers -- set lower than number of
#: dimensions in numpy, as
#: slices are passed by value and involve a lot of copying.
buffer_max_dims = 8

#: Number of function closure instances to keep in a freelist (0: no freelists)
closure_freelist_size = 8
|
||||
|
||||
|
||||
def get_directive_defaults():
    """Return the dict of default values for all compiler directives.

    Before returning, migrate any legacy module-level option that has been
    replaced by a directive: if the global still holds a plain value (i.e.
    someone assigned over the ShouldBeFromDirective placeholder), copy that
    value into the directive defaults, or raise if such assignment is
    disallowed for that option.

    Returns the module-level ``_directive_defaults`` dict (not a copy).
    Raises RuntimeError when a disallowed legacy option was set directly.
    """
    # To add an item to this list, all accesses should be changed to use the new
    # directive, and the global option itself should be set to an instance of
    # ShouldBeFromDirective.
    for old_option in ShouldBeFromDirective.known_directives:
        value = globals().get(old_option.options_name)
        assert old_option.directive_name in _directive_defaults
        if not isinstance(value, ShouldBeFromDirective):
            if old_option.disallow:
                # BUG FIX: was 'old_option.option_name', an attribute that
                # does not exist (__init__ sets 'options_name'), so this
                # branch raised AttributeError instead of the intended error.
                raise RuntimeError(
                    "Option '%s' must be set from directive '%s'" % (
                        old_option.options_name, old_option.directive_name))
            else:
                # Warn?
                _directive_defaults[old_option.directive_name] = value
    return _directive_defaults
|
||||
|
||||
# Declare compiler directives
# Maps each directive name to its default value; this table also defines
# the complete set of directive names recognised by the parsing helpers
# below (get_directive_defaults() may overlay legacy option values on it).
_directive_defaults = {
    'boundscheck' : True,
    'nonecheck' : False,
    'initializedcheck' : True,
    'embedsignature' : False,
    'auto_cpdef': False,
    'auto_pickle': None,
    'cdivision': False, # was True before 0.12
    'cdivision_warnings': False,
    'c_api_binop_methods': True,
    'cpow': True,
    'overflowcheck': False,
    'overflowcheck.fold': True,
    'always_allow_keywords': False,
    'allow_none_for_extension_args': True,
    'wraparound' : True,
    'ccomplex' : False, # use C99/C++ for complex types and arith
    'callspec' : "",
    'nogil' : False,
    'profile': False,
    'linetrace': False,
    'emit_code_comments': True, # copy original source code into C code comments
    'annotation_typing': True, # read type declarations from Python function annotations
    'infer_types': None,
    'infer_types.verbose': False,
    'autotestdict': True,
    'autotestdict.cdef': False,
    'autotestdict.all': False,
    'language_level': None,
    'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere.
    'py2_import': False, # For backward compatibility of Cython's source code in Py3 source mode
    'preliminary_late_includes_cy28': False, # Temporary directive in 0.28, to be removed in a later version (see GH#2079).
    'iterable_coroutine': False, # Make async coroutines backwards compatible with the old asyncio yield-from syntax.
    'c_string_type': 'bytes',
    'c_string_encoding': '',
    'type_version_tag': True, # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types
    'unraisable_tracebacks': True,
    'old_style_globals': False,
    'np_pythran': False,
    'fast_gil': False,

    # set __file__ and/or __path__ to known source/target path at import time (instead of not having them available)
    'set_initial_path' : None, # SOURCEFILE or "/full/path/to/module"

    'warn': None,
    'warn.undeclared': False,
    'warn.unreachable': True,
    'warn.maybe_uninitialized': False,
    'warn.unused': False,
    'warn.unused_arg': False,
    'warn.unused_result': False,
    'warn.multiple_declarators': True,

    # optimizations
    'optimize.inline_defnode_calls': True,
    'optimize.unpack_method_calls': True, # increases code size when True
    'optimize.unpack_method_calls_in_pyinit': False, # uselessly increases code size when True
    'optimize.use_switch': True,

    # remove unreachable code
    'remove_unreachable': True,

    # control flow debug directives
    'control_flow.dot_output': "", # Graphviz output filename
    'control_flow.dot_annotate_defs': False, # Annotate definitions

    # test support
    'test_assert_path_exists' : [],
    'test_fail_if_path_exists' : [],

    # experimental, subject to change
    'binding': None,

    'formal_grammar': False,
}
|
||||
|
||||
# Extra warning directives
# Overlay applied on top of _directive_defaults when extra warnings are
# requested; enables the warning directives that default to off.
extra_warnings = {
    'warn.maybe_uninitialized': True,
    'warn.unreachable': True,
    'warn.unused': True,
}
|
||||
|
||||
def one_of(*allowed):
    """Build a directive validator restricted to the given values.

    The returned ``validate(name, value)`` callable echoes *value* back when
    it is one of *allowed*, and raises ValueError (naming the directive)
    otherwise.
    """
    def validate(name, value):
        if value in allowed:
            return value
        raise ValueError("%s directive must be one of %s, got '%s'" % (
            name, allowed, value))
    return validate
|
||||
|
||||
|
||||
def normalise_encoding_name(option_name, encoding):
    """
    >>> normalise_encoding_name('c_string_encoding', 'ascii')
    'ascii'
    >>> normalise_encoding_name('c_string_encoding', 'AsCIi')
    'ascii'
    >>> normalise_encoding_name('c_string_encoding', 'us-ascii')
    'ascii'
    >>> normalise_encoding_name('c_string_encoding', 'utF8')
    'utf8'
    >>> normalise_encoding_name('c_string_encoding', 'utF-8')
    'utf8'
    >>> normalise_encoding_name('c_string_encoding', 'deFAuLT')
    'default'
    >>> normalise_encoding_name('c_string_encoding', 'default')
    'default'
    >>> normalise_encoding_name('c_string_encoding', 'SeriousLyNoSuch--Encoding')
    'SeriousLyNoSuch--Encoding'
    """
    if not encoding:
        return ''
    lowered = encoding.lower()
    # Canonical names are accepted directly (case-insensitively).
    if lowered in ('default', 'ascii', 'utf8'):
        return lowered
    import codecs
    try:
        decoder = codecs.getdecoder(encoding)
    except LookupError:
        # Unknown to this Python build; it may exist at runtime, so pass through.
        return encoding
    # Map aliases (e.g. 'us-ascii', 'utf-8') onto their canonical codec name.
    for canonical in ('ascii', 'utf8'):
        if codecs.getdecoder(canonical) == decoder:
            return canonical
    return encoding
|
||||
|
||||
|
||||
# Override types possibilities above, if needed
# Explicit value types (or validator callables) for directives whose type
# cannot be inferred from the default in _directive_defaults.
directive_types = {
    'language_level': str, # values can be None/2/3/'3str', where None == 2+warning
    'auto_pickle': bool,
    'locals': dict,
    'final' : bool, # final cdef classes and methods
    'nogil' : bool,
    'internal' : bool, # cdef class visibility in the module dict
    'infer_types' : bool, # values can be True/None/False
    'binding' : bool,
    'cfunc' : None, # decorators do not take directive value
    'ccall' : None,
    'inline' : None,
    'staticmethod' : None,
    'cclass' : None,
    'no_gc_clear' : bool,
    'no_gc' : bool,
    'returns' : type,
    'exceptval': type, # actually (type, check=True/False), but has its own parser
    'set_initial_path': str,
    'freelist': int,
    'c_string_type': one_of('bytes', 'bytearray', 'str', 'unicode'),
    'c_string_encoding': normalise_encoding_name,
    'cpow': bool
}

# Fill in the remaining directive types from the type of each default value.
for key, val in _directive_defaults.items():
    if key not in directive_types:
        directive_types[key] = type(val)

# Restricts where each directive may legally appear; directives not listed
# here are available everywhere.
directive_scopes = { # defaults to available everywhere
    # 'module', 'function', 'class', 'with statement'
    'auto_pickle': ('module', 'cclass'),
    'final' : ('cclass', 'function'),
    'nogil' : ('function', 'with statement'),
    'inline' : ('function',),
    'cfunc' : ('function', 'with statement'),
    'ccall' : ('function', 'with statement'),
    'returns' : ('function',),
    'exceptval' : ('function',),
    'locals' : ('function',),
    'staticmethod' : ('function',), # FIXME: analysis currently lacks more specific function scope
    'no_gc_clear' : ('cclass',),
    'no_gc' : ('cclass',),
    'internal' : ('cclass',),
    'cclass' : ('class', 'cclass', 'with statement'),
    'autotestdict' : ('module',),
    'autotestdict.all' : ('module',),
    'autotestdict.cdef' : ('module',),
    'set_initial_path' : ('module',),
    'test_assert_path_exists' : ('function', 'class', 'cclass'),
    'test_fail_if_path_exists' : ('function', 'class', 'cclass'),
    'freelist': ('cclass',),
    'emit_code_comments': ('module',),
    'annotation_typing': ('module',), # FIXME: analysis currently lacks more specific function scope
    # Avoid scope-specific to/from_py_functions for c_string.
    'c_string_type': ('module',),
    'c_string_encoding': ('module',),
    'type_version_tag': ('module', 'cclass'),
    'language_level': ('module',),
    # globals() could conceivably be controlled at a finer granularity,
    # but that would complicate the implementation
    'old_style_globals': ('module',),
    'np_pythran': ('module',),
    'fast_gil': ('module',),
    'iterable_coroutine': ('module', 'function'),
}
|
||||
|
||||
|
||||
def parse_directive_value(name, value, relaxed_bool=False):
    """
    Parses value as an option value for the given name and returns
    the interpreted value. None is returned if the option does not exist.

    >>> print(parse_directive_value('nonexisting', 'asdf asdfd'))
    None
    >>> parse_directive_value('boundscheck', 'True')
    True
    >>> parse_directive_value('boundscheck', 'true')
    Traceback (most recent call last):
    ...
    ValueError: boundscheck directive must be set to True or False, got 'true'

    >>> parse_directive_value('c_string_encoding', 'us-ascii')
    'ascii'
    >>> parse_directive_value('c_string_type', 'str')
    'str'
    >>> parse_directive_value('c_string_type', 'bytes')
    'bytes'
    >>> parse_directive_value('c_string_type', 'bytearray')
    'bytearray'
    >>> parse_directive_value('c_string_type', 'unicode')
    'unicode'
    >>> parse_directive_value('c_string_type', 'unnicode')
    Traceback (most recent call last):
    ValueError: c_string_type directive must be one of ('bytes', 'bytearray', 'str', 'unicode'), got 'unnicode'
    """
    # Renamed local from 'type' to avoid shadowing the builtin, which is
    # needed below for the 'is bool' / 'is int' / 'is str' comparisons anyway.
    directive_type = directive_types.get(name)
    if not directive_type:
        # Unknown directive (or one whose type is None, i.e. takes no value).
        return None
    orig_value = value
    if directive_type is bool:
        value = str(value)
        if value == 'True':
            return True
        if value == 'False':
            return False
        if relaxed_bool:
            # Accept common lowercase spellings in relaxed mode.
            value = value.lower()
            if value in ("true", "yes"):
                return True
            elif value in ("false", "no"):
                return False
        raise ValueError("%s directive must be set to True or False, got '%s'" % (
            name, orig_value))
    elif directive_type is int:
        try:
            return int(value)
        except ValueError:
            raise ValueError("%s directive must be set to an integer, got '%s'" % (
                name, orig_value))
    elif directive_type is str:
        return str(value)
    elif callable(directive_type):
        # Custom validator (e.g. one_of(...) or normalise_encoding_name).
        return directive_type(name, value)
    else:
        assert False
|
||||
|
||||
|
||||
def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
                         current_settings=None):
    """
    Parses a comma-separated list of pragma options. Whitespace
    is not considered.

    >>> parse_directive_list(' ')
    {}
    >>> (parse_directive_list('boundscheck=True') ==
    ... {'boundscheck': True})
    True
    >>> parse_directive_list(' asdf')
    Traceback (most recent call last):
    ...
    ValueError: Expected "=" in option "asdf"
    >>> parse_directive_list('boundscheck=hey')
    Traceback (most recent call last):
    ...
    ValueError: boundscheck directive must be set to True or False, got 'hey'
    >>> parse_directive_list('unknown=True')
    Traceback (most recent call last):
    ...
    ValueError: Unknown option: "unknown"
    >>> warnings = parse_directive_list('warn.all=True')
    >>> len(warnings) > 1
    True
    >>> sum(warnings.values()) == len(warnings)  # all true.
    True
    """
    result = current_settings if current_settings is not None else {}
    for chunk in s.split(','):
        option = chunk.strip()
        if not option:
            continue
        if '=' not in option:
            raise ValueError('Expected "=" in option "%s"' % option)
        name, value = [part.strip() for part in option.split('=', 1)]
        if name in _directive_defaults:
            result[name] = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
            continue
        # Unknown name: support 'warn.all'-style wildcards that switch every
        # sub-directive sharing the prefix at once.
        matched = False
        if name.endswith('.all'):
            prefix = name[:-3]  # keep the trailing dot, e.g. 'warn.'
            for directive in _directive_defaults:
                if directive.startswith(prefix):
                    matched = True
                    result[directive] = parse_directive_value(
                        directive, value, relaxed_bool=relaxed_bool)
        if not matched and not ignore_unknown:
            raise ValueError('Unknown option: "%s"' % name)
    return result
|
||||
|
||||
|
||||
def parse_variable_value(value):
    """
    Parses value as an option value for the given name and returns
    the interpreted value.

    >>> parse_variable_value('True')
    True
    >>> parse_variable_value('true')
    'true'
    >>> parse_variable_value('us-ascii')
    'us-ascii'
    >>> parse_variable_value('str')
    'str'
    >>> parse_variable_value('123')
    123
    >>> parse_variable_value('-123')
    -123
    >>> parse_variable_value('1.23')
    1.23

    """
    if value == "True":
        return True
    elif value == "False":
        return False
    elif value == "None":
        return None
    elif value.isdigit():
        return int(value)
    elif value.startswith('-') and value[1:].isdigit():
        # BUG FIX: isdigit() alone misses the sign, so negative integer
        # literals used to fall through and come back as floats (-123.0).
        return int(value)
    else:
        try:
            value = float(value)
        except Exception:
            # Not a float either -> keep it as a plain string.
            pass
        return value
|
||||
|
||||
|
||||
def parse_compile_time_env(s, current_settings=None):
    """
    Parses a comma-separated list of pragma options. Whitespace
    is not considered.

    >>> parse_compile_time_env(' ')
    {}
    >>> (parse_compile_time_env('HAVE_OPENMP=True') ==
    ... {'HAVE_OPENMP': True})
    True
    >>> parse_compile_time_env(' asdf')
    Traceback (most recent call last):
    ...
    ValueError: Expected "=" in option "asdf"
    >>> parse_compile_time_env('NUM_THREADS=4') == {'NUM_THREADS': 4}
    True
    >>> parse_compile_time_env('unknown=anything') == {'unknown': 'anything'}
    True
    """
    result = current_settings if current_settings is not None else {}
    for chunk in s.split(','):
        entry = chunk.strip()
        if not entry:
            continue
        if '=' not in entry:
            raise ValueError('Expected "=" in option "%s"' % entry)
        # Unlike directives, any NAME is accepted here; only the value is typed.
        name, value = [part.strip() for part in entry.split('=', 1)]
        result[name] = parse_variable_value(value)
    return result
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user