commit ffb97bcf81222679d38c99fc7fa8dc0a7c930a8d Author: Federico Poni Date: Mon Jul 1 10:53:02 2024 +0200 first commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..cb14e7f --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +.DS_Store +venv/ diff --git a/app.py b/app.py new file mode 100644 index 0000000..115dc1f --- /dev/null +++ b/app.py @@ -0,0 +1,389 @@ +import os +import shutil +import json +import subprocess +import markdown +from jinja2 import Environment, FileSystemLoader + + +def delete_project(): + + cms_file = "cms.json" + + if not os.path.exists(cms_file): + print("No projects found. Please generate a template directory first.") + return + + with open(cms_file, 'r') as f: + cms_data = json.load(f) + + # Check if categories exist + if not cms_data.get("categories"): + print("Error: No categories found in the CMS.") + return + + # List available projects + print("Available projects:") + for i, (project_slug, project_data) in enumerate(cms_data["projects"].items(), start=1): + project_name = project_data.get("name", "") + print("{}. {}".format(i, project_name)) + + # Ask user to choose a project to edit + project_index = int(input("Enter the number of the project to edit: ")) - 1 + project_slug = list(cms_data["projects"].keys())[project_index] + + cms_data['projects'].pop(project_slug) + + + with open(cms_file, 'w') as file: + json.dump(cms_data, file, indent=2) + + shutil.rmtree(f'website/projects/{project_slug}') + + print(f"Removed '{project_index}' from the projects.") + + + +def generate_template(): + project_name_it = input("Enter the name of the project in italian: ") + project_name_en = input("Enter the name of the project in english: ") + project_slug = project_name_it.lower().replace(" ", "_") + + project_dir = os.path.join("website/projects", project_slug) + en_dir = os.path.join(project_dir, "en") + assets_dir = os.path.join(project_dir, "assets") + markdown_file = os.path.join(project_dir, "content.md") + markdown_en_file = os.path.join(en_dir, "content.md") + cms_file = "cms.json" + + os.makedirs(en_dir, exist_ok=True) + os.makedirs(assets_dir, exist_ok=True) + open(markdown_file, 'a').close() # Create empty markdown files + open(markdown_en_file, 'a').close() + + # Update or create a key in the JSON file + if os.path.exists(cms_file): + with open(cms_file, 'r') as f: + cms_data = json.load(f) + else: + os.makedirs("website/en", exist_ok=True) + cms_data = {"projects":{}, "categories":[]} # Initialize categories list + + cms_data["projects"][project_slug] = {"name": [project_name_it, project_name_en]} + + with open(cms_file, 'w') as f: + json.dump(cms_data, f, indent=4) + + print("Template directory created for project '{}'.\n" + "Add images to '{}' and write content in '{}'.".format(project_name_it, assets_dir, markdown_file)) + + +def add_categories_to_cms(): + cms_file = "cms.json" + + if not os.path.exists(cms_file): + print("No projects found. 
Please generate a template directory first.") + return + + with open(cms_file, 'r') as f: + cms_data = json.load(f) + + category = [] + categories = cms_data.get("categories", []) + categoryIt = input("Enter the name of the category to add in italian: ") + categoryEn = input("Enter the name of the category to add in english: ") + + category = [categoryIt, categoryEn] + + + categories.append(category) + + cms_data["categories"] = categories + + with open(cms_file, 'w') as f: + json.dump(cms_data, f, indent=4) + + print("Category '{}' added to CMS.".format(category)) + + + +def prepare_project(): + cms_file = "cms.json" + + if not os.path.exists(cms_file): + print("No projects found. Please generate a template directory first.") + return + + with open(cms_file, 'r') as f: + cms_data = json.load(f) + + # Check if categories exist + if not cms_data.get("categories"): + print("Error: No categories found in the CMS.") + return + + # List available projects + print("Available projects:") + for i, (project_slug, project_data) in enumerate(cms_data["projects"].items(), start=1): + project_name = project_data.get("name", "") + print("{}. {}".format(i, project_name)) + + # Ask user to choose a project to edit + project_index = int(input("Enter the number of the project to edit: ")) - 1 + project_slug = list(cms_data["projects"].keys())[project_index] + project_data = cms_data["projects"][project_slug] + project_name = project_data.get("name", "") + + # List available categories + print("Available categories:") + for i, category in enumerate(cms_data["categories"], start=1): + print("{}. {}".format(i, category)) + + # Check if there are no categories + if not cms_data["categories"]: + print("Error: No categories found in the CMS.") + return + + # Ask user to choose a category + category_index = int(input("Enter the number of the category to assign to the project: ")) - 1 + selected_category = cms_data["categories"][category_index] + + # Update project data with the selected category + project_data["category"] = selected_category + + # Proceed with editing the selected project + project_dir = os.path.join("website/projects", project_slug) + en_dir = os.path.join(project_dir, "en") + assets_dir = os.path.join(project_dir, "assets") + # compressed_dir = os.path.join(assets_dir, "compressed") + markdown_file = os.path.join(project_dir, "content.md") + markdown_en_file = os.path.join(en_dir, "content.md") + + # Check if assets folder exists + if not os.path.exists(assets_dir): + print("Error: Assets folder not found for project '{}'. " + "Please make sure the 'assets' folder exists.".format(project_name)) + return + + # Check if there are no pictures in the assets folder + if not os.listdir(assets_dir): + print("Error: No pictures found in the 'assets' folder for project '{}'." + " Please add pictures before proceeding.".format(project_name)) + return + + # Compress/resize images + # os.makedirs(compressed_dir, exist_ok=True) + image_files = [f for f in os.listdir(assets_dir) if os.path.isfile(os.path.join(assets_dir, f))] + for image_file in image_files: + subprocess.run(["convert", os.path.join(assets_dir, image_file), "-resize", "50%", "-define", "jpeg:extent=512kb", os.path.join(assets_dir, image_file)]) + + # Prompt user to select main picture + print("Available pictures:") + for i, image_file in enumerate(image_files, start=1): + print("{}. 
{}".format(i, image_file)) + print("Enter the number of the main picture (or press Enter to keep current): ") + main_picture_index = input() + if main_picture_index: + main_picture_index = int(main_picture_index) - 1 + if 0 <= main_picture_index < len(image_files): + main_picture = image_files[main_picture_index] + else: + print("Invalid picture number. Keeping current main picture.") + main_picture = project_data.get("main_picture", "") + else: + main_picture = project_data.get("main_picture", "") + + # Prompt user to choose if main picture appears in the gallery + main_in_gallery = True + main_in_gallery_boolean = input("Do you want the main picture be in the gallery? Yes / No :") + if main_in_gallery_boolean == "yes" or main_in_gallery_boolean == "Yes": + main_in_gallery = True + else: + main_in_gallery = False + + # Prompt user to update captions for images + captions_it = project_data.get("captions_it", {}) + captions_en = project_data.get("captions_en", {}) + for image_file in image_files: + if not main_in_gallery: + if image_file == main_picture: + continue + + print("Current caption for '{}' (Italian): {}".format(image_file, captions_it.get(image_file, "No caption"))) + new_caption_it = input("Enter new caption for '{}' (Italian) (press Enter to keep current): ".format(image_file)).strip() + if new_caption_it: + captions_it[image_file] = new_caption_it + elif image_file not in captions_it: + captions_it[image_file] = "" + + print("Current caption for '{}' (English): {}".format(image_file, captions_en.get(image_file, "No caption"))) + new_caption_en = input("Enter new caption for '{}' (English) (press Enter to keep current): ".format(image_file)).strip() + if new_caption_en: + captions_en[image_file] = new_caption_en + elif image_file not in captions_en: + captions_en[image_file] = "" + + # Prompt user to update video URL + print("Current video URL: {}".format(project_data.get("video_url", "None"))) + video_url = input("Enter new video URL (press Enter to keep current): ").strip() + if not video_url: + video_url = project_data.get("video_url", "") + + # Convert markdown file to HTML for Italian + with open(markdown_file, 'r') as f: + markdown_content_it = f.read() + markdown_content_it = markdown.markdown(markdown_content_it) + html_content_it = markdown_content_it + + # Convert markdown file to HTML for English + with open(markdown_en_file, 'r') as f: + markdown_content_en = f.read() + markdown_content_en = markdown.markdown(markdown_content_en) + html_content_en = markdown_content_en + + # Update project data in JSON file + project_data["main_picture"] = main_picture + project_data["captions_it"] = captions_it + project_data["captions_en"] = captions_en + project_data["video_url"] = video_url + project_data["html_content_it"] = html_content_it + project_data["html_content_en"] = html_content_en + + + # Save updated JSON file + with open(cms_file, 'w') as f: + json.dump(cms_data, f, indent=4) + + print("Project preparation complete.") + + + +def generate_website(): + cms_file = "cms.json" + + if not os.path.exists(cms_file): + print("No projects found. 
Please generate a template directory first.") + return + + with open(cms_file, 'r') as f: + cms_data = json.load(f) + + # markdown misc content + misc_content_en = {} + misc_content_it = {} + + # Get a list of Markdown files in the 'en' folder + markdown_files_en = [f for f in os.listdir('misc/en') if f.endswith('.md')] + + # Iterate through each Markdown file in the 'en' folder + for file_name in markdown_files_en: + file_path = os.path.join('misc/en', file_name) + + # Read the content of the Markdown file + with open(file_path, 'r', encoding='utf-8') as file: + content = file.read() + + # Convert Markdown content to HTML + html_content = markdown.markdown(content) + + # Store the HTML content in the dictionary with the file name as the key + misc_content_en[file_name.replace('.md','')] = html_content + + # Get a list of Markdown files in the 'it' folder + markdown_files_it = [f for f in os.listdir('misc/it') if f.endswith('.md')] + + # Iterate through each Markdown file in the 'it' folder + for file_name in markdown_files_it: + file_path = os.path.join('misc/it', file_name) + + # Read the content of the Markdown file + with open(file_path, 'r', encoding='utf-8') as file: + content = file.read() + + # Convert Markdown content to HTML + html_content = markdown.markdown(content) + + # Store the HTML content in the dictionary with the file name as the key + misc_content_it[file_name.replace('.md','')] = html_content + + + + gallery_pics = [f for f in os.listdir('website/galleria') if not f.startswith('.')] + + # Get gallery images + # os.makedirs('website/galleria/compressed', exist_ok=True) + + # Compress/resize images + for gallery_pic in gallery_pics: + if not gallery_pic.endswith('compressed'): + subprocess.run(["convert", os.path.join('website/galleria', gallery_pic), "-resize", "50%", "-define", "jpeg:extent=512kb", os.path.join('website/galleria/', gallery_pic)]) + + # Initialize Jinja environment + env = Environment(loader=FileSystemLoader('.')) + + # Render index page + # In italian + index_template_it = env.get_template('template/index_template.html') + index_html_it = index_template_it.render(cms_data=cms_data, gallery_pics=gallery_pics, misc_content=misc_content_it) + + with open('website/index.html', 'w') as index_file: + index_file.write(index_html_it) + + # In english + index_template_en = env.get_template('template/index_template_en.html') + index_html_en = index_template_en.render(cms_data=cms_data, gallery_pics=gallery_pics, misc_content=misc_content_en) + + with open('website/en/index.html', 'w') as index_file: + index_file.write(index_html_en) + + + # Render project pages + + # In Italian + project_template = env.get_template('template/project_template.html') + + for project, values in cms_data["projects"].items(): + project_html = project_template.render(values=values, cms_data=cms_data, misc_content=misc_content_it) + project_dir = os.path.join("website/projects", project) + with open(os.path.join(project_dir, "index.html"), 'w') as project_file: + project_file.write(project_html) + + # In english + project_template_en = env.get_template('template/project_template_en.html') + + for project, values in cms_data["projects"].items(): + project_html = project_template_en.render(values=values, cms_data=cms_data, misc_content=misc_content_en) + project_dir = os.path.join("website/projects/", project,'en/') + with open(os.path.join(project_dir, "index.html"), 'w') as project_file: + project_file.write(project_html) + + print("Website generation complete.") + + +def main(): + 
print("Select an option:") + print("1. Generate a template directory") + print("2. Prepare a project by scanning the directory") + print("3. Add categories") + print("4. Generate the website") + print("5. Delete a project") + print("6. Exit") + choice = input("Enter your choice (1, 2, 3, 4, or 5): ") + + if choice == '1': + generate_template() + elif choice == '2': + prepare_project() + elif choice == '3': + add_categories_to_cms() + elif choice == '4': + generate_website() + elif choice == '5': + delete_project() + elif choice == '6': + exit() + else: + print("Invalid choice. Please enter 1, 2, 3, or 4.") + +if __name__ == "__main__": + main() diff --git a/bin/Activate.ps1 b/bin/Activate.ps1 new file mode 100644 index 0000000..b49d77b --- /dev/null +++ b/bin/Activate.ps1 @@ -0,0 +1,247 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. 
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/bin/activate b/bin/activate new file mode 100644 index 0000000..6f2a7de --- /dev/null +++ b/bin/activate @@ -0,0 +1,70 @@ +# This file must be used with "source bin/activate" *from bash* +# You cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # Call hash to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + hash -r 2> /dev/null + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +# on Windows, a path can contain colons and backslashes and has to be converted: +if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then + # transform D:\path\to\venv to /d/path/to/venv on MSYS + # and to /cygdrive/d/path/to/venv on Cygwin + export VIRTUAL_ENV=$(cygpath "/Users/poni/lorenzo-web/script") +else + # use the path as-is + export VIRTUAL_ENV="/Users/poni/lorenzo-web/script" +fi + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1="(script) ${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT="(script) " + export VIRTUAL_ENV_PROMPT +fi + +# Call hash to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +hash -r 2> /dev/null diff --git a/bin/activate.csh b/bin/activate.csh new file mode 100644 index 0000000..abd6091 --- /dev/null +++ b/bin/activate.csh @@ -0,0 +1,27 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. + +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/Users/poni/lorenzo-web/script" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = "(script) $prompt" + setenv VIRTUAL_ENV_PROMPT "(script) " +endif + +alias pydoc python -m pydoc + +rehash diff --git a/bin/activate.fish b/bin/activate.fish new file mode 100644 index 0000000..d77f978 --- /dev/null +++ b/bin/activate.fish @@ -0,0 +1,69 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/). You cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + set -e _OLD_FISH_PROMPT_OVERRIDE + # prevents error when using nested fish instances (Issue #93858) + if functions -q _old_fish_prompt + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV "/Users/poni/lorenzo-web/script" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) "(script) " (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT "(script) " +end diff --git a/bin/markdown_py b/bin/markdown_py new file mode 100755 index 0000000..89e1168 --- /dev/null +++ b/bin/markdown_py @@ -0,0 +1,8 @@ +#!/Users/poni/lorenzo-web/script/bin/python3.12 +# -*- coding: utf-8 -*- +import re +import sys +from markdown.__main__ import run +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(run()) diff --git a/bin/pip b/bin/pip new file mode 100755 index 0000000..f936ad2 --- /dev/null +++ b/bin/pip @@ -0,0 +1,8 @@ +#!/Users/poni/lorenzo-web/script/bin/python3.12 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/pip3 b/bin/pip3 new file mode 100755 index 0000000..f936ad2 --- /dev/null +++ b/bin/pip3 @@ -0,0 +1,8 @@ +#!/Users/poni/lorenzo-web/script/bin/python3.12 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/pip3.12 b/bin/pip3.12 new file mode 100755 index 0000000..f936ad2 --- /dev/null +++ b/bin/pip3.12 @@ -0,0 +1,8 @@ +#!/Users/poni/lorenzo-web/script/bin/python3.12 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/python b/bin/python new file mode 120000 index 0000000..11b9d88 --- /dev/null +++ b/bin/python @@ -0,0 +1 @@ +python3.12 \ No newline at end of file diff --git a/bin/python3 b/bin/python3 new file mode 120000 index 0000000..11b9d88 --- /dev/null +++ b/bin/python3 @@ -0,0 +1 @@ +python3.12 \ No newline at end of file diff --git a/bin/python3.12 b/bin/python3.12 new file mode 120000 index 0000000..f3d5d0b --- /dev/null +++ b/bin/python3.12 @@ -0,0 +1 @@ +/usr/local/opt/python@3.12/bin/python3.12 \ No newline at end of file diff --git a/cms.json b/cms.json new file mode 100644 index 0000000..bc0c7c6 --- /dev/null +++ b/cms.json @@ -0,0 +1,208 @@ +{ + "projects": { + "confabulazioni": { + "name": [ + "Confabulazioni", + "Confabulazioni" + ], + "category": [ + "Prosa", + "Theater" + ], + "main_picture": "Locandina - opzione A.jpg", + "captions_it": { + "Locandina - opzione B.jpg": "", + "4.jpg": "", + "5.jpg": "", + "7.jpg": "", + "6.jpg": "", + "2.jpg": "", + "3.jpg": "", + "1.jpg": "" + }, + "captions_en": { + "Locandina - opzione B.jpg": "", + "4.jpg": "", + "5.jpg": "", + "7.jpg": "", + "6.jpg": "", + "2.jpg": "", + "3.jpg": "", + "1.jpg": "" + }, + "video_url": "", + "html_content_it": "
<ul>\n
<li>di Eleonora Paris</li>\n
<li>regia Lorenzo Ponte</li>\n
<li>tutor Michele De Vita Conti</li>\n
<li>con Elena Callegari, Alice Conti, Marco Vergani e con Sebastiano Bronzato, Chiara Cali\u00f2, Eleonora Paris, Lorenzo Ponte</li>\n
<li>musiche dal vivo Sebastiano Bronzato</li>\n
<li>adattamento musicale Gabriele Gramaglia, Edoardo Grittini, Gianfranco Pedroli</li>\n
<li>scene e costumi Serena D\u2019Orlando, Valentina Silva, Lorenzo Vigevani</li>\n
<li>videomaker Chiara Cali\u00f2</li>\n
<li>tecnica video Fabio Brusadin</li>\n
<li>organizzazione e promozione Simona Conforti, Elisa Piasente, Camilla Rizzi</li>\n
<li>Spettacolo inserito nella rassegna teatrale Teste Inedite</li>\n
<li>realizzata da autori, registi e organizzatori della Civica Scuola di Teatro Paolo Grassi</li>\n
</ul>\n
<p>Note di regia</p>\n
<p>Confabulazioni di Eleonora Paris \u00e8 una storia di formazione; i personaggi faticano ad affrontare la realt\u00e0, l\u2019inevitabile fine delle cose. Mettono in scena un matrimonio, il pi\u00f9 tradizionale dei riti, per esorcizzare la paura della morte della nonna e fuggire dalla complessit\u00e0 di una relazione d\u2019amore. Verit\u00e0 e menzogna si intrecciano in questo testo, tutto \u00e8 finto, nulla \u00e8 falso. Ognuno dei personaggi ha le sue ragioni per affrontare questo gioco di ruoli, tutti si impegnano affinch\u00e9 la festa riesca al meglio. Dentro i personaggi si agitano segreti e pensieri irrisolti, fuori tutto deve apparire perfetto. In scena musicisti e operatori video affiancano gli attori per costruire l\u2019immagine di un\u2019unione felice. Se all\u2019inizio le regole del gioco sono chiare, procedendo i confini sfumano e il gioco si fa via via pi\u00f9 pericoloso. Lo sforzo continuo di rendere credibile la festa finisce per soggiogare i personaggi alla loro stessa menzogna. In un\u2019epoca in cui l\u2019autenticit\u00e0 e il benessere individuale sono i valori su cui si fonda la nostra societ\u00e0, qual \u00e8 lo stato di salute dei modelli che hanno tentato di assicurarci la felicit\u00e0 di coppia fino ad oggi?</p>\n
<p>Lorenzo Ponte</p>\n
<p>Note di drammaturgia</p>\n
<p>Due amici d\u2019infanzia si rivedono a un anno di distanza. Lui vive a Londra, lei vive in Italia con sua nonna, una donna molto anziana. Per realizzare il desiderio della nonna di vedere sua nipote sposata prima di morire, i due decidono di organizzare un finto matrimonio. Questa finzione per\u00f2 riapre in loro dei problemi irrisolti e la loro stessa messinscena li spinger\u00e0 a guardarsi veramente e ad affrontare la complessit\u00e0 del loro rapporto. Confabulazioni \u00e8 un testo che parla di solitudine, una condizione che i personaggi vivono con timore ma che al tempo stesso vedono come l\u2019unica possibilit\u00e0 per esprimere la propria libert\u00e0 personale. La paura di non bastare a s\u00e9 stessi e la paura che l\u2019altro limiti la propria indipendenza sono i due poli entro i quali i personaggi si muovono. I loro ricordi si confondono con le narrazioni idealizzate del loro rapporto: se i ricordi creano una storia e questa storia \u00e8 costruita sulle proprie fantasie non si hanno pi\u00f9 parametri per valutare la realt\u00e0. Tutto vale, ma se tutto vale niente ha pi\u00f9 valore. Attraverso questo testo ho cercato di indagare il modo in cui oggi si ridefiniscono i rapporti affettivi e come la paura di perdere l\u2019altro ci impedisca di scoprirlo realmente.</p>\n
<p>Eleonora Paris</p>\n
<p>Giugno 2018</p>
", + "html_content_en": "
<ul>\n
<li>written by Eleonora Paris</li>\n
<li>directed by Lorenzo Ponte</li>\n
<li>tutor Michele De Vita Conti</li>\n
<li>with Elena Callegari, Alice Conti, Marco Vergani and Sebastiano Bronzato, Chiara Cali\u00f2, Eleonora Paris, Lorenzo Ponte</li>\n
<li>live music Sebastiano Bronzato</li>\n
<li>soundtrack Gabriele Gramaglia, Edoardo Grittini, Gianfranco Pedroli</li>\n
<li>set and costume design Serena D\u2019Orlando, Valentina Silva, Lorenzo Vigevani</li>\n
<li>videomaker Chiara Cali\u00f2</li>\n
<li>video set up Fabio Brusadin</li>\n
<li>organization and promotion Simona Conforti, Elisa Piasente, Camilla Rizzi</li>\n
<li>Show included in Teste Inedite, Civica Scuola di Teatro Paolo Grassi</li>\n
</ul>\n
<p>Director's notes</p>\n
<p>Confabulazioni by Eleonora Paris is a coming-of-age story; the characters struggle to face reality, the inevitable end of things. They stage a wedding, the most traditional of rites, to exorcise the fear of their grandmother's death and escape from the complexity of a love relationship. Truth and lies are intertwined in this text, everything is fake, nothing is false. Each of the characters has their own reasons for tackling this role play, everyone is committed to making sure the party is as successful as possible. Secrets and unresolved thoughts swirl inside the characters, everything must appear perfect on the outside. On stage, musicians and videographers work alongside the actors to build the image of a happy union. If at the beginning the rules of the game are clear, as the game progresses the boundaries blur and the game gradually becomes more dangerous. The continuous effort to make the party credible ends up subjugating the characters to their own lie. In a society founded on the values of authenticity and individual well-being, what is the state of health of the models who have attempted to ensure the happiness of people?</p>\n
<p>Lorenzo Ponte</p>\n
<p>Notes on dramaturgy</p>\n
<p>Two childhood friends meet again a year later. He lives in London, she lives in Italy with her grandmother, a very old woman. To fulfill the grandmother's wish to see her granddaughter married before she dies, the two decide to organize a fake wedding. This fiction, however, reopens unresolved problems in them and their very staging will push them to really look at each other and face the complexity of their relationship. Confabulazioni is a text that talks about loneliness, a condition that the characters live with fear but at the same time see as the only possibility to express their personal freedom. The fear of not being enough for oneself and the fear that others will limit one's independence are the two poles within which the characters move. Their memories become confused with the idealized narratives of their relationship: if the memories create a story and this story is built on one's fantasies, one no longer has parameters to evaluate reality. Everything is valid, but if everything is valid nothing has value anymore. Through this text I tried to investigate the way in which emotional relationships are redefined today and how the fear of losing the other prevents us from truly discovering it.</p>\n
<p>Eleonora Paris</p>\n
<p>June 2018</p>
" + }, + "tu_sei_agatha": { + "name": [ + "Tu Sei Agatha", + "You Are Agatha" + ], + "category": [ + "Prosa", + "Theater" + ], + "main_picture": "A - Locandina.png", + "captions_it": {}, + "captions_en": {}, + "video_url": "", + "html_content_it": "
<ul>\n
<li>da Agatha di Marguerite Duras</li>\n
<li>adattamento e regia Lorenzo Ponte</li>\n
<li>con Christian La Rosa e Valentina Picello</li>\n
<li>scena Davide Signorini</li>\n
<li>musica Sebastiano Bronzato</li>\n
<li>luci Giuliano Almerighi</li>\n
<li>Foto Luca Del Pia</li>\n
<li>produzione Teatro Franco Parenti</li>\n
</ul>\n
<p>Si ringraziano Sabrina Sinatti, tutor del progetto - Civica Scuola di Teatro Paolo Grassi - e Lab121. Si ringrazia per la collaborazione VIE dei festival - Roma e la rassegna Stanze, per cui lo spettacolo \u00e8 andato in scena a Palazzo Boschi Di Stefano.</p>\n
<p>Esistono legami che niente \u00e8 in grado di scalfire. Tu sei Agatha d\u00e0 forma poetica a un mistero irrappresentabile: l'amore disperato e impossibile tra un fratello e una sorella. Tra le righe del testo si legge velata la bruciante autobiografia di Marguerite Duras, del suo amore per il fratello Paulo morto giovane e dei suoi ricordi.</p>\n
<p>Nella villa d'infanzia, ormai abbandonata, Agata incontra il fratello che non vede da anni. Ha deciso di partire e interrompere il rapporto incestuoso che li unisce; ha incontrato un uomo e andr\u00e0 a vivere con lui ma la possibilit\u00e0 di una separazione fa riemergere ricordi, nodi di un legame, difficili da sciogliere. Il loro rapporto incestuoso e indissolubile \u00e8 uno specchio rovesciato che riflette la nostra difficolt\u00e0 ad incontrare ed accettare la libert\u00e0 dell'Altro e il dolore del fallimento.</p>\n
<p>La scena \u00e8 vuota per fare spazio a un rito d'incontro tra due corpi che si sono sempre cercati senza mai toccarsi. Lo spazio teatrale diventa il luogo ideale per indagare il cuore della relazione e liberare quel feroce desiderio a lungo frustrato.</p>\n
<p>Tu sei Agatha \u00e8 la storia di un amore e del tentativo di sopravvivere alla sua forza.</p>\n
<p>Ottobre 2018</p>
", + "html_content_en": "
<ul>\n
<li>from Agatha by Marguerite Duras</li>\n
<li>dramaturgy and direction Lorenzo Ponte</li>\n
<li>with Christian La Rosa and Valentina Picello</li>\n
<li>set design Davide Signorini</li>\n
<li>music Sebastiano Bronzato</li>\n
<li>light design Giuliano Almerighi</li>\n
<li>Pictures by Luca Del Pia</li>\n
<li>Production Teatro Franco Parenti</li>\n
</ul>\n
<p>Thanks to Sabrina Sinatti, project tutor - Civica Scuola di Teatro Paolo Grassi - Lab121 - VIE dei Festival, Rome - Stanze, Milan</p>\n
<p>There are bonds that nothing can undermine. You are Agatha gives poetic form to an unrepresentable mystery: the desperate and impossible love between a brother and a sister. Hidden in this fictional story there is of course the burning autobiography of Marguerite Duras and the memory of her love for her brother Paulo, who died when he was an adolescent.</p>\n
<p>In her childhood villa, now abandoned, Agatha meets her brother. She has decided to leave and interrupt the incestuous relationship that unites them; she has met a man and will go to live with him. However, the impending separation brings back memories, which are not fading away. Their incestuous and indissoluble relationship is an inverted mirror that reflects our difficulty in meeting and accepting the freedom of the Other and the pain of failure.</p>\n
<p>The stage is empty in order to host a ritual between two bodies that have always looked for each other without being able to touch. Theater becomes the ideal place to investigate the heart of the relationship and release that ferocious desire that has long been frustrated.</p>\n
<p>Tu sei Agatha is the story of a love and the attempt to survive its strength.</p>\n
<p>October 2018</p>
" + }, + "buoni_a_nulla": { + "name": [ + "Buoni a Nulla", + "Good for Nothing" + ], + "category": [ + "Prosa", + "Theater" + ], + "main_picture": "Locandina.JPG", + "captions_it": { + "6c.JPG": "", + "6b.JPG": "", + "8.JPG": "", + "9.JPG": "", + "1a.JPG": "", + "10.JPG": "", + "2b.JPG": "", + "4.JPG": "", + "5.JPG": "", + "7.JPG": "", + "6.JPG": "", + "2.JPG": "", + "1.JPG": "" + }, + "captions_en": { + "6c.JPG": "", + "6b.JPG": "", + "8.JPG": "", + "9.JPG": "", + "1a.JPG": "", + "10.JPG": "", + "2b.JPG": "", + "4.JPG": "", + "5.JPG": "", + "7.JPG": "", + "6.JPG": "", + "2.JPG": "", + "1.JPG": "" + }, + "video_url": "", + "html_content_it": "
<ul>\n
<li>testo e regia Lorenzo Ponte</li>\n
<li>con Tobia Dal Corso Polzot, Paola Galassi e Luca Oldani</li>\n
<li>scena Davide Signorini</li>\n
<li>costumi Giulia Rossena</li>\n
<li>luci Emanuele Agliati</li>\n
<li>suono Gaetano Pappalardo e Simone Sigurani</li>\n
<li>assistente regia Filippo Capobianco</li>\n
<li>voce giornale radio Pietro Adami</li>\n
<li>con la supervisione artistica di Giuliana Musso</li>\n
<li>Foto di Luca Del Pia</li>\n
<li>produzione Teatro Franco Parenti</li>\n
</ul>\n
<p>con il sostegno di PRAXIS - Olinda/TeatroLaCucina - Vincitore bando Animali Teatrali Fantastici 2021 - ZONA K nell'ambito del progetto IntercettAzioni - Centro di Residenza Artistica della Lombardia; Fondazione Claudia Lombardi per il teatro</p>\n
<p>Osservare e stare ai margini per capire cosa sta al centro della nostra societ\u00e0. Guidati da questa idea, per 3 anni abbiamo fatto un percorso di conoscenza e avvicinamento alla vita delle persone senza dimora nella citt\u00e0 di Milano. Tra il 2019 e il 2021, la compagnia ha affiancato unit\u00e0 di strada composte da psicologi, educatori e volontari che lavorano con persone che vivono questo fenomeno di espulsione.</p>\n
<p>I nostri pregiudizi sono andati in frantumi e, visti da vicino, abbiamo trovato molti punti di contatto con persone che tendiamo a nascondere e cacciare dalle vie delle nostre citt\u00e0. I poveri oggi vengono invisibilizzati e stigmatizzati perch\u00e9 costringono a fare i conti con le storture e i limiti di citt\u00e0 sempre pi\u00f9 esclusive e respingenti.</p>\n
<p>Dalla ricerca \u00e8 scaturito uno spettacolo di narrazione nel quale si incarnano le voci, i silenzi, i corpi e le emozioni attraversate sul campo. Siamo alla fermata di un autobus che non sappiamo se passer\u00e0 mai. Lo aspettiamo insieme a tre personaggi le cui vicende intrecciandosi raccontano come un prisma la marginalit\u00e0 interiore e sociale di Milano. Un giovane studente universitario della Bocconi, alle prese con gli ultimi esami prima della laurea. Una giornalista precaria di 30 anni, affaticata dai ritmi cittadini, che ha deciso di fare volontariato con le persone senza dimora. E infine uno strambo profeta, che maledice la follia che domina la citt\u00e0 e offre a tutti una via di salvezza.</p>\n
<p>Con lo spettacolo vogliamo riportare al centro la vita di chi sta ai margini provando a mettere in discussione lo sguardo pietistico e di superiorit\u00e0 che riserviamo ai devianti. La voce e il vissuto degli ultimi ci interpellano: come ci poniamo di fronte al dolore dell'altro? E noi, i sani, come stiamo? E adesso, ancora, che fare?</p>
", + "html_content_en": "
<ul>\n
<li>written and directed by Lorenzo Ponte</li>\n
<li>with Tobia Dal Corso Polzot, Paola Galassi and Luca Oldani</li>\n
<li>set design Davide Signorini</li>\n
<li>costumes Giulia Rossena</li>\n
<li>lights Emanuele Agliati</li>\n
<li>sound design Gaetano Pappalardo and Simone Sigurani</li>\n
<li>assistant director Filippo Capobianco</li>\n
<li>radio news voice Pietro Adami</li>\n
<li>with the artistic supervision of Giuliana Musso</li>\n
<li>Pictures by Luca Del Pia</li>\n
<li>production Teatro Franco Parenti</li>\n
</ul>\n
<p>with the support of PRAXIS - Olinda/TeatroLaCucina - Winner of the Fantastic Theatrical Animals 2021 ZONA K tender as part of the IntercettAzioni project - Lombardy Artistic Residency Centre; Claudia Lombardi Foundation for the theatre</p>\n
<p>Observe and stay on the margins to understand what is at the center of our society. Guided by this idea, for 3 years we have undertaken a journey of understanding and approaching the lives of homeless people in the city of Milan. Between 2019 and 2021, the company joined psychologists, educators and volunteers who work with people experiencing this phenomenon of expulsion.</p>\n
<p>The research method was divided into three different moments. A first part of participatory observation, in which the whole company accompanied the street units for a long period on night outings in the downtown streets. This work has allowed us to build relationships of trust. The second part of the research consisted of going into the street to talk to people without being framed as voluntary associations, to try to distance ourselves from a volunteer-assisted relationship. Finally, where possible, we conducted interviews with homeless people, workers, volunteers and experts to try to build a broader framework that would allow us to read homelessness as a social phenomenon and not as an individual problem. Our prejudices were shattered and, seen up close, we found many points of contact with people whom we tend to hide and chase from the streets of our cities. The poor today are invisibilized and stigmatized because they force us to deal with the distortions and limitations of increasingly exclusive and repelling cities.</p>\n
<p>From the research described above, a narrative show emerged in which the voices, silences, bodies and emotions experienced in the field are embodied. We are at the stop of a bus that we don't know will ever pass. We wait for it together with three characters whose intertwining stories reflect, like a prism, the inner and social marginality of Milan. A young Bocconi student, struggling with the last exams before graduation. A 30-year-old precarious journalist, tired by the city's rhythms, who has decided to volunteer with homeless people. And finally a strange prophet, who curses the madness that dominates the city and offers everyone a way of salvation.</p>\n
<p>With the show we want to bring the life of those on the margins back to the center by trying to question the pietistic and superior gaze that we reserve for deviants. The voices and experiences of the least challenge us: how do we face the pain of the other? And how are we, the healthy ones? And now, what can we do?</p>
" + }, + "quando_non_saremo_grandi": { + "name": [ + "Quando non saremo grandi", + "Once we won't be great" + ], + "category": [ + "Prosa", + "Theater" + ], + "main_picture": "Immagine locandina copia.jpeg", + "captions_it": { + "5 copia.jpg": "", + ".DS_Store": "", + "3 copia.jpg": "", + "4 copia.jpg": "", + "2 copia.jpg": "", + "7 copia.jpg": "", + "1 copia.jpg": "", + "6 copia.jpg": "" + }, + "captions_en": { + "5 copia.jpg": "", + ".DS_Store": "", + "3 copia.jpg": "", + "4 copia.jpg": "", + "2 copia.jpg": "", + "7 copia.jpg": "", + "1 copia.jpg": "", + "6 copia.jpg": "" + }, + "video_url": "", + "html_content_it": "
<ul>\n
<li>soggetto Giulia Lombezzi e Lorenzo Ponte</li>\n
<li>drammaturgia Giulia Lombezzi</li>\n
<li>con Ilaria Marchian\u00f2, Elia Galeotti</li>\n
<li>regia Lorenzo Ponte</li>\n
<li>scene e costumi Chiara Previato</li>\n
<li>La voce dei Grandi \u00e8 di Alberto Mancioppi</li>\n
<li>produzione Teatro Franco Parenti</li>\n
<li>in collaborazione con Associazione Pier Lombardo</li>\n
<li>Foto di Andrea Salafia</li>\n
</ul>\n
<p>Milano. Anno 2123. Uma e Mo, adolescenti, vivono nella comunit\u00e0 nomade dei Grandi. Mo \u00e8 il maggiore, ma \u00e8 Uma a occuparsi di lui, a farsi carico per entrambi di tutte le responsabilit\u00e0. Mo \u00e8 malato e ha un segreto. Vivono in una Milano colpita dalla siccit\u00e0, priva di ordine sociale, dove le risorse elettriche sono limitate e ogni cosa, a causa della crisi climatica, si \u00e8 fermata. Devono lavorare sottostando a regole infinite e punteggi impietosi.</p>\n
<p>Il premio agognato \u00e8 un trasferimento sul Pianeta B, dove regna il benessere, dove l'acqua non \u00e8 razionata e puoi accendere la luce quando vuoi. Mentre Uma si impegna per il passaggio al Pianeta B, Mo vaneggia sul Fuori, il territorio proibito oltre i Distretti, dove la natura sopravvive nonostante i cambiamenti climatici. Possibile che il Pianeta B sia l'unica speranza? E se nel Fuori ci fosse una possibilit\u00e0 di recuperare il nostro rapporto con la natura, con la nostra umanit\u00e0?</p>\n
<p>Maggio / Novembre 2023</p>
", + "html_content_en": "
<ul>\n
<li>directed by Lorenzo Ponte</li>\n
<li>subject Giulia Lombezzi and Lorenzo Ponte</li>\n
<li>dramaturgy Giulia Lombezzi</li>\n
<li>with Ilaria Marchian\u00f2, Elia Galeotti</li>\n
<li>set and costumes Chiara Previato</li>\n
<li>Voice of the Great Ones Alberto Mancioppi</li>\n
<li>production Teatro Franco Parenti in collaboration with the Pier Lombardo Association</li>\n
<li>Photo by Andrea Salafia</li>\n
</ul>\n
<p>Milan. Year 2123. Uma and Mo, teenagers, live in the nomadic community of the Great Ones. Mo is the eldest, but it is Uma who takes care of him, who takes on all the responsibilities for both of them. Mo is sick and has a secret.</p>\n
<p>They live in a drought-stricken Milan, devoid of social order, where electricity resources are limited and everything, due to the climate crisis, has stopped. They have to work under endless rules and merciless scores. The desired prize is a transfer to Planet B, where well-being reigns, where water is not rationed and you can turn on the light whenever you want. While Uma strives for the transition to Planet B, Mo raves about the Outside, the forbidden territory beyond the Districts, where nature survives despite climate change. Is it possible that Planet B is the only hope? What if outside there was a possibility of recovering our relationship with nature, with our humanity?</p>\n
<p>May / November 2023</p>
" + }, + "idomeneo": { + "name": [ + "Idomeneo, re di Creta", + "Idomeneo, re di Creta" + ], + "category": [ + "Opera", + "Opera" + ], + "main_picture": "Locandina.jpg", + "captions_it": { + "15 b.jpg": "", + "11 a.jpg": "", + "8.jpg": "", + "11 b.jpg": "", + "9 copia.jpg": "", + "16 b.jpg": "", + "15.jpg": "", + "17.jpg": "", + "16.jpg": "", + "10 copia.jpg": "", + "21.jpg": "", + "22.jpg": "", + "13 copia.jpg": "", + "18.jpg": "", + "19.jpg": "", + "4.jpg": "", + "14 copia.jpg": "", + "5.jpg": "", + "7.jpg": "", + "6.jpg": "", + "2.jpg": "", + "12 copia.jpg": "", + "3.jpg": "", + "1.jpg": "" + }, + "captions_en": { + "15 b.jpg": "", + "11 a.jpg": "", + "8.jpg": "", + "11 b.jpg": "", + "9 copia.jpg": "", + "16 b.jpg": "", + "15.jpg": "", + "17.jpg": "", + "16.jpg": "", + "10 copia.jpg": "", + "21.jpg": "", + "22.jpg": "", + "13 copia.jpg": "", + "18.jpg": "", + "19.jpg": "", + "4.jpg": "", + "14 copia.jpg": "", + "5.jpg": "", + "7.jpg": "", + "6.jpg": "", + "2.jpg": "", + "12 copia.jpg": "", + "3.jpg": "", + "1.jpg": "" + }, + "video_url": "", + "html_content_it": "
<ul>\n
<li>Musica Wolfgang Amadeus Mozart</li>\n
<li>Libretto Giambattista Varesco dal dramma di Danchet</li>\n
<li>Nuova produzione Op\u00e9ra national de Lorraine</li>\n
<li>OPERA NATIONAL DE LORRAINE ORCHESTRA E CORO</li>\n
<li>Direttore Jakob Lehmann</li>\n
<li>Regista Lorenzo Ponte</li>\n
<li>Scene Alice Benazzi</li>\n
<li>Costumi Giulia Rossena</li>\n
<li>Luci Emanuele Agliati</li>\n
<li>Assistente alle luci Alessandro Manni</li>\n
<li>Foto di Simon Gosselin</li>\n
<li>Direttore del coro Guillaume Fauch\u00e8re</li>\n
<li>Direttore assistente William Le Sage</li>\n
</ul>\n
<ul>\n
<li>Idomeneo Toby Spence</li>\n
<li>Idamante Heloise Mas</li>\n
<li>Ilia Siobhan Stagg</li>\n
<li>Elettra Amanda Woodbury</li>\n
<li>Arbace L\u00e9o Vermot-Desroches</li>\n
<li>Il grande sacerdote* Wook Kang</li>\n
<li>La voce di Nettuno Louis Morvan</li>\n
<li>Donne Cretesi* Inna Jeskova e S\u00e9verine Maquaire</li>\n
<li>Troiani* Yongwoo Jung e Jinhyuck Kim</li>\n
<li>Coro lontano* Yongwoo Jung, Il Ju Lee, Jinhyuck Kim e Christophe Sagnier</li>\n
<li>Meda Rosabel Huguet</li>\n
</ul>\n
<p>*Solisti del Coro</p>\n
<p>I miti si tramandano in versioni sempre diverse a seconda del luogo e del tempo. Dopo la seconda guerra mondiale, Christa Wolf inizi\u00f2 a riscrivere i miti da una prospettiva che mise in discussione l'egemonia dei padri. Nel 2023 la storia di Idomeneo \u00e8 ancora tutta da interrogare: uomini e donne sono nelle mani degli dei, le scelte di un re vengono attribuite a un mostro, il sacrificio umano \u00e8 accettabile e l'amore \u00e8 raccontato come uno strumento capace di guarire tutto.</p>\n
<p>Partiamo dal conflitto con cui si conclude l'opera: il Dio del mare ha risolto tutto per tutti tranne che per un solo personaggio. Elettra grida che la gioia e l'amore le sono stati rubati. Qual \u00e8 il crimine per il quale promette vendetta? Credo che ci sia qualcosa di pi\u00f9 profondo del rifiuto di Idamante a muoverla a una tale rabbia. C'\u00e8 una violenza nascosta in questa famiglia. Il sacrificio del bambino viene presentato come un incidente. Invece questo sacrificio, questa violenza sono le fondamenta stesse della civilt\u00e0 cretese.</p>\n
<p>Nell'opera mancano totalmente i riferimenti alla Regina di Creta, moglie di Idomeneo e madre di Idamante. \u00c8 difficile persino rintracciare la sua presenza in altre fonti letterarie. Sappiamo che si chiamava Meda e che fu uccisa per tradimento. Qual \u00e8 stato il tradimento che le \u00e8 costato una damnatio memoriae? Meda fu uccisa perch\u00e9 vide, perch\u00e9 sapeva su quale crimine giace Creta: l'abuso perpetuo del figlio da parte del Padre.</p>\n
<p>Intorno a Idomeneo e alla sua famiglia c'\u00e8 una cultura che accetta e celebra il sacrificio. Nel nostro allestimento siamo negli anni '60. Il Grande Sacerdote e il sacrificio saranno rappresentati come parti della liturgia cristiana, la religione del Padre. L'uccisione dei bambini \u00e8 gi\u00e0 avvenuta quando Idomeneo abusava di loro. La religione nasconde la verit\u00e0 e tiene lontano l'orrore dagli uomini e dalle donne. L'abuso \u00e8 possibile perch\u00e9 c'\u00e8 una comunit\u00e0 che lo tollera. Il coro interpreta le famiglie che abitano nei dintorni di Idomeneo. Vogliono una vita pacifica, minacciata dal dolore di questi bambini, e quindi hanno bisogno di un rito che guarisca e riporti la pace.</p>\n
<p>\u00c8 Elettra a riaprire la ferita. Lei \u00e8 una donna di 30/35 anni degli anni '80 che viagger\u00e0 nella memoria per trovare responsabilit\u00e0: vedr\u00e0 e ci mostrer\u00e0 nuovamente la Regina Meda e quindi una possibilit\u00e0 di un vero nuovo ordine basato non sulla violenza e sul sacrificio, ma sulla tutela dei deboli.</p>\n
<p>Il set racconta questo viaggio nella memoria attraverso i mezzi della fotografia analogica. Il lavoro sulla memoria diventa il lavoro di sviluppo di un'immagine. I passaggi narrativi saranno scanditi dal procedimento fotografico. Inizieremo da un album di famiglia da cui viene cancellato il corpo della madre e vedremo il volto della madre alla fine dell'opera.</p>\n
<p>Christa Wolf ha scritto: "A poco a poco, quando inizi a sapere, inizi anche a ricordare. Conoscere e ricordare sono la stessa cosa". Mentre la storia scritta da Varesco procede, la messa in scena mette le parole e le azioni dei personaggi sotto una luce diversa. Questa storia \u00e8 raccontata dal punto di vista di una donna ritenuta pazza. Questa \u00e8 una storia sul valore della testimonianza, sulla memoria e sulla responsabilit\u00e0.</p>\n
<p>Settembre - Ottobre 2023</p>
", + "html_content_en": "
    \n
  • Libretto Father Giambattista Varesco after the play by Danchet
  • Music Wolfgang Amadeus Mozart
  • New production Op\u00e9ra national de Lorraine
  • OPERA NATIONAL DE LORRAINE ORCHESTRA AND CHORUS
  • Conductor Jakob Lehmann
  • Stage director Lorenzo Ponte
  • Set design Alice Benazzi
  • Costumes Giulia Rossena
  • Lighting Emanuele Agliati
  • Lighting assistant Alessandro Manni
  • Pictures by Simon Gosselin
  • Chorus master Guillaume Fauch\u00e8re
  • Assistant conductor William Le Sage
  • Idomeneo Toby Spence
  • Idamante H\u00e9lo\u00efse Mas
  • Ilia Siobhan Stagg
  • Electre Amanda Woodbury
  • Arbace L\u00e9o Vermot-Desroches
  • The High Priest* Wook Kang
  • The Voice of Neptune Louis Morvan
  • Cretan women* Inna Jeskova and S\u00e9verine Maquaire
  • Trojans Yongwoo Jung and Jinhyuck Kim
  • Distant chorus Yongwoo Jung, Ill Ju Lee, Jinhyuck Kim and Christophe Sagnier
  • Meda Rosabel Huguet

*Opera Chorus soloists

\n

Myths are handed down in ever-changing versions according to place and time. After World War II,\nChrista Wolf began to rewrite myths from a perspective that questioned the hegemony of the fathers.\nIn 2023 the story of Idomeneo still needs to be questioned: men and women are in the hands of the\ngods, the choices of a king are blamed on a monster, human sacrifice is acceptable and love is told\nas a tool capable of healing everything.

\n

We start from the conflict with which the opera ends: the god of the sea has resolved everything\nfor everyone except a single character. Electra cries out that joy and love have been stolen from\nher. What is the crime for which she pledges revenge? I believe there must be something deeper than\nIdamante's rejection to move her to such rage. There is a hidden violence in this family. The\nsacrifice of the child is presented as an accident. Yet this sacrifice, this violence, are the very\nfoundations of the city.

\n

The opera contains no reference to the Queen of Crete, wife of Idomeneo and mother of Idamante. It\nis difficult even to trace her presence in other literary sources. Her name was Meda and she was\nkilled for treason. What was the betrayal that cost her a damnatio memoriae? Meda was killed\nbecause she saw, because she knew on which crime Crete rests: the perpetual abuse of the child by\nthe Father. Around Idomeneo and his family there is a culture that accepts and celebrates the\nsacrifice. In our staging we are in the 1960s. The High Priest and the sacrifice will be\nrepresented as parts of the Christian liturgy, the religion of the Father.

\n

The killing of the children has already taken place when Idomeneo abused them. Religion hides the\ntruth and keeps the horror away from men and women. The abuse is possible because there is a\ncommunity that tolerates it. The chorus plays the families that live near Idomeneo. They want a\npeaceful life, which is threatened by the pain of these children, and therefore they need a rite\nthat heals and restores peace.\nElectra is the one who reopens the wound. She is a woman of 30-35 in the 1980s who will travel\nthrough memory to find responsibilities; she will see and show us again Queen Meda, and with her\nthe possibility of a truly new order based not on violence and sacrifice but on the protection of\nthe weak.

\n

The set tells this journey into memory through the means of analog photography. The work on memory\nbecomes the work of developing a picture. The narrative passages will be marked by the\nphotographic process. We will start from a family album from which the body of the mother has been\nerased, and we will see the mother's face at the end of the opera.

\n

Christa Wolf wrote: \"Little by little, when you begin to know, you begin to remember too.\nKnowing and remembering are the same thing\". As Varesco's story unfolds, the staging puts the words\nand actions of the characters in a different light. This story is told from the perspective of a\nwoman who is thought to be insane. This is a story about the value of testimony, about memory and\nresponsibility.

\n

September - October 2023

" + } + }, + "categories": [ + [ + "Opera", + "Opera" + ], + [ + "Prosa", + "Theater" + ] + ] +} \ No newline at end of file diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/INSTALLER b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/METADATA b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/METADATA new file mode 100644 index 0000000..56e9429 --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/METADATA @@ -0,0 +1,105 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 3.1.3 +Summary: A very fast and expressive template engine. 
+Home-page: https://palletsprojects.com/p/jinja/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Changes, https://jinja.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/jinja/ +Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: MarkupSafe >=2.0 +Provides-Extra: i18n +Requires-Dist: Babel >=2.7 ; extra == 'i18n' + +Jinja +===== + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. +- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. +- I18N support with Babel. +- Templates are compiled to optimized Python code just-in-time and + cached, or can be compiled ahead-of-time. +- Exceptions point to the correct line in templates to make debugging + easier. +- Extensible filters, tests, functions, and even syntax. + +Jinja's philosophy is that while application logic belongs in Python if +possible, it shouldn't make the template designer's job difficult by +restricting functionality too much. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Jinja2 + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +In A Nutshell +------------- + +.. code-block:: jinja + + {% extends "base.html" %} + {% block title %}Members{% endblock %} + {% block content %} + + {% endblock %} + + +Donate +------ + +The Pallets organization develops and supports Jinja and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://jinja.palletsprojects.com/ +- Changes: https://jinja.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Jinja2/ +- Source Code: https://github.com/pallets/jinja/ +- Issue Tracker: https://github.com/pallets/jinja/issues/ +- Chat: https://discord.gg/pallets diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/RECORD b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/RECORD new file mode 100644 index 0000000..eefb694 --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/RECORD @@ -0,0 +1,59 @@ +Jinja2-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-3.1.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Jinja2-3.1.3.dist-info/METADATA,sha256=0cLNbRCI91jytc7Bzv3XAQfZzFDF2gxkJuH46eF5vew,3301 +Jinja2-3.1.3.dist-info/RECORD,, +Jinja2-3.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Jinja2-3.1.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +Jinja2-3.1.3.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59 +Jinja2-3.1.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=NTBwMwsECrdHmxeXF7seusHLzrh6Ldn1A9qhS5cDuf0,1927 +jinja2/__pycache__/__init__.cpython-312.pyc,, +jinja2/__pycache__/_identifier.cpython-312.pyc,, +jinja2/__pycache__/async_utils.cpython-312.pyc,, +jinja2/__pycache__/bccache.cpython-312.pyc,, +jinja2/__pycache__/compiler.cpython-312.pyc,, +jinja2/__pycache__/constants.cpython-312.pyc,, +jinja2/__pycache__/debug.cpython-312.pyc,, +jinja2/__pycache__/defaults.cpython-312.pyc,, +jinja2/__pycache__/environment.cpython-312.pyc,, +jinja2/__pycache__/exceptions.cpython-312.pyc,, +jinja2/__pycache__/ext.cpython-312.pyc,, +jinja2/__pycache__/filters.cpython-312.pyc,, +jinja2/__pycache__/idtracking.cpython-312.pyc,, +jinja2/__pycache__/lexer.cpython-312.pyc,, +jinja2/__pycache__/loaders.cpython-312.pyc,, +jinja2/__pycache__/meta.cpython-312.pyc,, +jinja2/__pycache__/nativetypes.cpython-312.pyc,, +jinja2/__pycache__/nodes.cpython-312.pyc,, +jinja2/__pycache__/optimizer.cpython-312.pyc,, +jinja2/__pycache__/parser.cpython-312.pyc,, +jinja2/__pycache__/runtime.cpython-312.pyc,, +jinja2/__pycache__/sandbox.cpython-312.pyc,, +jinja2/__pycache__/tests.cpython-312.pyc,, +jinja2/__pycache__/utils.cpython-312.pyc,, +jinja2/__pycache__/visitor.cpython-312.pyc,, +jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 +jinja2/async_utils.py,sha256=dFcmh6lMNfbh7eLKrBio8JqAKLHdZbpCuurFN4OERtY,2447 +jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061 +jinja2/compiler.py,sha256=PJzYdRLStlEOqmnQs1YxlizPrJoj3jTZuUleREn6AIQ,72199 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=0qldX3VQKZcm6lgn7zHz94oRFow7YPYERiqkquomNjU,61253 +jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=5fnMpllaXkfm2P_93RIvi-OnK7Tk8mCW8Du-GcD12Hc,31844 +jinja2/filters.py,sha256=vYjKb2zaPShvYtn_LpSmqfS8SScbrA_KOanNibsMDIE,53862 +jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 +jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726 
+jinja2/loaders.py,sha256=ayAwxfrA1SAffQta0nwSDm3TDT4KYiIGN_D9Z45B310,23085 +jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 +jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 +jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 +jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 +jinja2/parser.py,sha256=Y199wPL-G67gJoi5G_5sHuu9uEP1PJkjjLEW_xTH8-k,39736 +jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=_6LkKIWFJjQdqlrgA3K39zBFQ-7Orm3wGDm96RwxQoE,33406 +jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584 +jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 +jinja2/utils.py,sha256=IMwRIcN1SsTw2-jdQtlH2KzNABsXZBW_-tnFXafQBvY,23933 +jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568 diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/REQUESTED b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/WHEEL b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt new file mode 100644 index 0000000..7b9666c --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[babel.extractors] +jinja2 = jinja2.ext:babel_extract[i18n] diff --git a/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/top_level.txt b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/lib/python3.12/site-packages/Jinja2-3.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/INSTALLER b/lib/python3.12/site-packages/Markdown-3.6.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/LICENSE.md b/lib/python3.12/site-packages/Markdown-3.6.dist-info/LICENSE.md new file mode 100644 index 0000000..6249d60 --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/LICENSE.md @@ -0,0 +1,30 @@ +BSD 3-Clause License + +Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later) +Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b) +Copyright 2004 Manfred Stienstra (the original version) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/METADATA b/lib/python3.12/site-packages/Markdown-3.6.dist-info/METADATA new file mode 100644 index 0000000..516d18d --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/METADATA @@ -0,0 +1,146 @@ +Metadata-Version: 2.1 +Name: Markdown +Version: 3.6 +Summary: Python implementation of John Gruber's Markdown. +Author: Manfred Stienstra, Yuri Takhteyev +Author-email: Waylan limberg +Maintainer: Isaac Muse +Maintainer-email: Waylan Limberg +License: BSD 3-Clause License + + Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later) + Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b) + Copyright 2004 Manfred Stienstra (the original version) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +Project-URL: Homepage, https://Python-Markdown.github.io/ +Project-URL: Documentation, https://Python-Markdown.github.io/ +Project-URL: Repository, https://github.com/Python-Markdown/markdown +Project-URL: Issue Tracker, https://github.com/Python-Markdown/markdown/issues +Project-URL: Changelog, https://python-markdown.github.io/changelog/ +Keywords: markdown,markdown-parser,python-markdown,markdown-to-html +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Communications :: Email :: Filters +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries +Classifier: Topic :: Internet :: WWW/HTTP :: Site Management +Classifier: Topic :: Software Development :: Documentation +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Filters +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Topic :: Text Processing :: Markup :: Markdown +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE.md +Requires-Dist: importlib-metadata >=4.4 ; python_version < "3.10" +Provides-Extra: docs +Requires-Dist: mkdocs >=1.5 ; extra == 'docs' +Requires-Dist: mkdocs-nature >=0.6 ; extra == 'docs' +Requires-Dist: mdx-gh-links >=0.2 ; extra == 'docs' +Requires-Dist: mkdocstrings[python] ; extra == 'docs' +Requires-Dist: mkdocs-gen-files ; extra == 'docs' +Requires-Dist: mkdocs-section-index ; extra == 'docs' +Requires-Dist: mkdocs-literate-nav ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: coverage ; extra == 'testing' +Requires-Dist: pyyaml ; extra == 'testing' + +[Python-Markdown][] +=================== + +[![Build Status][build-button]][build] +[![Coverage Status][codecov-button]][codecov] +[![Latest Version][mdversion-button]][md-pypi] +[![Python Versions][pyversion-button]][md-pypi] +[![BSD License][bsdlicense-button]][bsdlicense] +[![Code of Conduct][codeofconduct-button]][Code of Conduct] + +[build-button]: https://github.com/Python-Markdown/markdown/workflows/CI/badge.svg?event=push +[build]: https://github.com/Python-Markdown/markdown/actions?query=workflow%3ACI+event%3Apush +[codecov-button]: https://codecov.io/gh/Python-Markdown/markdown/branch/master/graph/badge.svg +[codecov]: https://codecov.io/gh/Python-Markdown/markdown +[mdversion-button]: https://img.shields.io/pypi/v/Markdown.svg +[md-pypi]: https://pypi.org/project/Markdown/ +[pyversion-button]: https://img.shields.io/pypi/pyversions/Markdown.svg +[bsdlicense-button]: https://img.shields.io/badge/license-BSD-yellow.svg +[bsdlicense]: https://opensource.org/licenses/BSD-3-Clause +[codeofconduct-button]: https://img.shields.io/badge/code%20of%20conduct-contributor%20covenant-green.svg?style=flat-square +[Code of Conduct]: https://github.com/Python-Markdown/markdown/blob/master/CODE_OF_CONDUCT.md + +This is a Python implementation of 
John Gruber's [Markdown][]. +It is almost completely compliant with the reference implementation, +though there are a few known issues. See [Features][] for information +on what exactly is supported and what is not. Additional features are +supported by the [Available Extensions][]. + +[Python-Markdown]: https://Python-Markdown.github.io/ +[Markdown]: https://daringfireball.net/projects/markdown/ +[Features]: https://Python-Markdown.github.io#Features +[Available Extensions]: https://Python-Markdown.github.io/extensions + +Documentation +------------- + +```bash +pip install markdown +``` +```python +import markdown +html = markdown.markdown(your_text_string) +``` + +For more advanced [installation] and [usage] documentation, see the `docs/` directory +of the distribution or the project website at . + +[installation]: https://python-markdown.github.io/install/ +[usage]: https://python-markdown.github.io/reference/ + +See the change log at . + +Support +------- + +You may report bugs, ask for help, and discuss various other issues on the [bug tracker][]. + +[bug tracker]: https://github.com/Python-Markdown/markdown/issues + +Code of Conduct +--------------- + +Everyone interacting in the Python-Markdown project's code bases, issue trackers, +and mailing lists is expected to follow the [Code of Conduct]. diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/RECORD b/lib/python3.12/site-packages/Markdown-3.6.dist-info/RECORD new file mode 100644 index 0000000..6715a72 --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/RECORD @@ -0,0 +1,75 @@ +../../../bin/markdown_py,sha256=4ZgxDp0aOu2bvhxlP5wPJakDcatfwd6VqZhBNEDSA6U,243 +Markdown-3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Markdown-3.6.dist-info/LICENSE.md,sha256=e6TrbRCzKy0R3OE4ITQDUc27swuozMZ4Qdsv_Ybnmso,1650 +Markdown-3.6.dist-info/METADATA,sha256=8_ETqzTxcOemQXj7ujUabMFcDBDGtsRrccFDr1-XWvc,7040 +Markdown-3.6.dist-info/RECORD,, +Markdown-3.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Markdown-3.6.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +Markdown-3.6.dist-info/entry_points.txt,sha256=lMEyiiA_ZZyfPCBlDviBl-SiU0cfoeuEKpwxw361sKQ,1102 +Markdown-3.6.dist-info/top_level.txt,sha256=IAxs8x618RXoH1uCqeLLxXsDefJvE_mIibr_M4sOlyk,9 +markdown/__init__.py,sha256=dfzwwdpG9L8QLEPBpLFPIHx_BN056aZXp9xZifTxYIU,1777 +markdown/__main__.py,sha256=innFBxRqwPBNxG1zhKktJji4bnRKtVyYYd30ID13Tcw,5859 +markdown/__meta__.py,sha256=DqtqnYYLznrkvI1G4JalBc4WpgOp48naNoG9zlMWZas,1712 +markdown/__pycache__/__init__.cpython-312.pyc,, +markdown/__pycache__/__main__.cpython-312.pyc,, +markdown/__pycache__/__meta__.cpython-312.pyc,, +markdown/__pycache__/blockparser.cpython-312.pyc,, +markdown/__pycache__/blockprocessors.cpython-312.pyc,, +markdown/__pycache__/core.cpython-312.pyc,, +markdown/__pycache__/htmlparser.cpython-312.pyc,, +markdown/__pycache__/inlinepatterns.cpython-312.pyc,, +markdown/__pycache__/postprocessors.cpython-312.pyc,, +markdown/__pycache__/preprocessors.cpython-312.pyc,, +markdown/__pycache__/serializers.cpython-312.pyc,, +markdown/__pycache__/test_tools.cpython-312.pyc,, +markdown/__pycache__/treeprocessors.cpython-312.pyc,, +markdown/__pycache__/util.cpython-312.pyc,, +markdown/blockparser.py,sha256=j4CQImVpiq7g9pz8wCxvzT61X_T2iSAjXupHJk8P3eA,5728 +markdown/blockprocessors.py,sha256=koY5rq8DixzBCHcquvZJp6x2JYyBGjrwxMWNZhd6D2U,27013 +markdown/core.py,sha256=DyyzDsmd-KcuEp8ZWUKJAeUCt7B7G3J3NeqZqp3LphI,21335 
+markdown/extensions/__init__.py,sha256=9z1khsdKCVrmrJ_2GfxtPAdjD3FyMe5vhC7wmM4O9m0,4822 +markdown/extensions/__pycache__/__init__.cpython-312.pyc,, +markdown/extensions/__pycache__/abbr.cpython-312.pyc,, +markdown/extensions/__pycache__/admonition.cpython-312.pyc,, +markdown/extensions/__pycache__/attr_list.cpython-312.pyc,, +markdown/extensions/__pycache__/codehilite.cpython-312.pyc,, +markdown/extensions/__pycache__/def_list.cpython-312.pyc,, +markdown/extensions/__pycache__/extra.cpython-312.pyc,, +markdown/extensions/__pycache__/fenced_code.cpython-312.pyc,, +markdown/extensions/__pycache__/footnotes.cpython-312.pyc,, +markdown/extensions/__pycache__/legacy_attrs.cpython-312.pyc,, +markdown/extensions/__pycache__/legacy_em.cpython-312.pyc,, +markdown/extensions/__pycache__/md_in_html.cpython-312.pyc,, +markdown/extensions/__pycache__/meta.cpython-312.pyc,, +markdown/extensions/__pycache__/nl2br.cpython-312.pyc,, +markdown/extensions/__pycache__/sane_lists.cpython-312.pyc,, +markdown/extensions/__pycache__/smarty.cpython-312.pyc,, +markdown/extensions/__pycache__/tables.cpython-312.pyc,, +markdown/extensions/__pycache__/toc.cpython-312.pyc,, +markdown/extensions/__pycache__/wikilinks.cpython-312.pyc,, +markdown/extensions/abbr.py,sha256=JqFOfU7JlhIFY06-nZnSU0wDqneFKKWMe95eXB-iLtc,3250 +markdown/extensions/admonition.py,sha256=Hqcn3I8JG0i-OPWdoqI189TmlQRgH6bs5PmpCANyLlg,6547 +markdown/extensions/attr_list.py,sha256=t3PrgAr5Ebldnq3nJNbteBt79bN0ccXS5RemmQfUZ9g,7820 +markdown/extensions/codehilite.py,sha256=ChlmpM6S--j-UK7t82859UpYjm8EftdiLqmgDnknyes,13503 +markdown/extensions/def_list.py,sha256=J3NVa6CllfZPsboJCEycPyRhtjBHnOn8ET6omEvVlDo,4029 +markdown/extensions/extra.py,sha256=1vleT284kued4HQBtF83IjSumJVo0q3ng6MjTkVNfNQ,2163 +markdown/extensions/fenced_code.py,sha256=-fYSmRZ9DTYQ8HO9b_78i47kVyVu6mcVJlqVTMdzvo4,8300 +markdown/extensions/footnotes.py,sha256=bRFlmIBOKDI5efG1jZfDkMoV2osfqWip1rN1j2P-mMg,16710 +markdown/extensions/legacy_attrs.py,sha256=oWcyNrfP0F6zsBoBOaD5NiwrJyy4kCpgQLl12HA7JGU,2788 +markdown/extensions/legacy_em.py,sha256=-Z_w4PEGSS-Xg-2-BtGAnXwwy5g5GDgv2tngASnPgxg,1693 +markdown/extensions/md_in_html.py,sha256=y4HEWEnkvfih22fojcaJeAmjx1AtF8N-a_jb6IDFfts,16546 +markdown/extensions/meta.py,sha256=v_4Uq7nbcQ76V1YAvqVPiNLbRLIQHJsnfsk-tN70RmY,2600 +markdown/extensions/nl2br.py,sha256=9KKcrPs62c3ENNnmOJZs0rrXXqUtTCfd43j1_OPpmgU,1090 +markdown/extensions/sane_lists.py,sha256=ogAKcm7gEpcXV7fSTf8JZH5YdKAssPCEOUzdGM3C9Tw,2150 +markdown/extensions/smarty.py,sha256=yqT0OiE2AqYrqqZtcUFFmp2eJsQHomiKzgyG2JFb9rI,11048 +markdown/extensions/tables.py,sha256=oTDvGD1qp9xjVWPGYNgDBWe9NqsX5gS6UU5wUsQ1bC8,8741 +markdown/extensions/toc.py,sha256=PGg-EqbBubm3n0b633r8Xa9kc6JIdbo20HGAOZ6GEl8,18322 +markdown/extensions/wikilinks.py,sha256=j7D2sozica6sqXOUa_GuAXqIzxp-7Hi60bfXymiuma8,3285 +markdown/htmlparser.py,sha256=dEr6IE7i9b6Tc1gdCLZGeWw6g6-E-jK1Z4KPj8yGk8Q,14332 +markdown/inlinepatterns.py,sha256=7_HF5nTOyQag_CyBgU4wwmuI6aMjtadvGadyS9IP21w,38256 +markdown/postprocessors.py,sha256=eYi6eW0mGudmWpmsW45hduLwX66Zr8Bf44WyU9vKp-I,4807 +markdown/preprocessors.py,sha256=pq5NnHKkOSVQeIo-ajC-Yt44kvyMV97D04FBOQXctJM,3224 +markdown/serializers.py,sha256=YtAFYQoOdp_TAmYGow6nBo0eB6I-Sl4PTLdLDfQJHwQ,7174 +markdown/test_tools.py,sha256=MtN4cf3ZPDtb83wXLTol-3q3aIGRIkJ2zWr6fd-RgVE,8662 +markdown/treeprocessors.py,sha256=o4dnoZZsIeVV8qR45Njr8XgwKleWYDS5pv8dKQhJvv8,17651 +markdown/util.py,sha256=vJ1E0xjMzDAlTqLUSJWgdEvxdQfLXDEYUssOQMw9kPQ,13929 diff --git 
a/lib/python3.12/site-packages/Markdown-3.6.dist-info/REQUESTED b/lib/python3.12/site-packages/Markdown-3.6.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/WHEEL b/lib/python3.12/site-packages/Markdown-3.6.dist-info/WHEEL new file mode 100644 index 0000000..bab98d6 --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/entry_points.txt b/lib/python3.12/site-packages/Markdown-3.6.dist-info/entry_points.txt new file mode 100644 index 0000000..be3bd8f --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/entry_points.txt @@ -0,0 +1,22 @@ +[console_scripts] +markdown_py = markdown.__main__:run + +[markdown.extensions] +abbr = markdown.extensions.abbr:AbbrExtension +admonition = markdown.extensions.admonition:AdmonitionExtension +attr_list = markdown.extensions.attr_list:AttrListExtension +codehilite = markdown.extensions.codehilite:CodeHiliteExtension +def_list = markdown.extensions.def_list:DefListExtension +extra = markdown.extensions.extra:ExtraExtension +fenced_code = markdown.extensions.fenced_code:FencedCodeExtension +footnotes = markdown.extensions.footnotes:FootnoteExtension +legacy_attrs = markdown.extensions.legacy_attrs:LegacyAttrExtension +legacy_em = markdown.extensions.legacy_em:LegacyEmExtension +md_in_html = markdown.extensions.md_in_html:MarkdownInHtmlExtension +meta = markdown.extensions.meta:MetaExtension +nl2br = markdown.extensions.nl2br:Nl2BrExtension +sane_lists = markdown.extensions.sane_lists:SaneListExtension +smarty = markdown.extensions.smarty:SmartyExtension +tables = markdown.extensions.tables:TableExtension +toc = markdown.extensions.toc:TocExtension +wikilinks = markdown.extensions.wikilinks:WikiLinkExtension diff --git a/lib/python3.12/site-packages/Markdown-3.6.dist-info/top_level.txt b/lib/python3.12/site-packages/Markdown-3.6.dist-info/top_level.txt new file mode 100644 index 0000000..0918c97 --- /dev/null +++ b/lib/python3.12/site-packages/Markdown-3.6.dist-info/top_level.txt @@ -0,0 +1 @@ +markdown diff --git a/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/METADATA b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/METADATA new file mode 100644 index 0000000..dfe37d5 --- /dev/null +++ b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.1.5 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/markupsafe/ +Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +Examples +-------- + +.. 
code-block:: pycon + + >>> from markupsafe import Markup, escape + + >>> # escape replaces special characters and wraps in Markup + >>> escape("") + Markup('<script>alert(document.cookie);</script>') + + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup("Hello") + Markup('hello') + + >>> escape(Markup("Hello")) + Markup('hello') + + >>> # Markup is a str subclass + >>> # methods and operators escape their arguments + >>> template = Markup("Hello {name}") + >>> template.format(name='"World"') + Markup('Hello "World"') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://markupsafe.palletsprojects.com/ +- Changes: https://markupsafe.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/MarkupSafe/ +- Source Code: https://github.com/pallets/markupsafe/ +- Issue Tracker: https://github.com/pallets/markupsafe/issues/ +- Chat: https://discord.gg/pallets diff --git a/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/RECORD b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/RECORD new file mode 100644 index 0000000..2f91c3e --- /dev/null +++ b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/RECORD @@ -0,0 +1,14 @@ +MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.1.5.dist-info/METADATA,sha256=2dRDPam6OZLfpX0wg1JN5P3u9arqACxVSfdGmsJU7o8,3003 +MarkupSafe-2.1.5.dist-info/RECORD,, +MarkupSafe-2.1.5.dist-info/WHEEL,sha256=1_erwh2TCU3TrYzgBQGCtZskLEmw2vbfn7Xu2mHHvyU,111 +MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=r7VOTjUq7EMQ4v3p4R1LoVOGJg6ysfYRncLr34laRBs,10958 +markupsafe/__pycache__/__init__.cpython-312.pyc,, +markupsafe/__pycache__/_native.cpython-312.pyc,, +markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 +markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 +markupsafe/_speedups.cpython-312-darwin.so,sha256=zO55W2sOSohVnMko0CD2bITP4v6UpfeRj-x2I_8pGkA,35208 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL new file mode 100644 index 0000000..0271fd6 --- /dev/null +++ b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: false +Tag: cp312-cp312-macosx_10_9_x86_64 + diff --git a/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/lib/python3.12/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/INSTALLER b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ 
b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/LICENSE b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/LICENSE new file mode 100644 index 0000000..c270f2c --- /dev/null +++ b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/LICENSE @@ -0,0 +1,20 @@ +Original work Copyright (C) 2011-2018 by Hong Minhee +Modified work Copyright (C) 2019-2023 by E. McConville + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/METADATA b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/METADATA new file mode 100644 index 0000000..de7327d --- /dev/null +++ b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/METADATA @@ -0,0 +1,128 @@ +Metadata-Version: 2.1 +Name: Wand +Version: 0.6.13 +Summary: Ctypes-based simple MagickWand API binding for Python +Home-page: http://wand-py.org/ +Author: Hong Minhee +Author-email: hongminhee@member.fsf.org +Maintainer: E. 
McConville +Maintainer-email: emcconville@emcconville.com +License: MIT License +Project-URL: Documentation, https://docs.wand-py.org +Project-URL: Source, https://github.com/emcconville/wand +Project-URL: Tracker, https://github.com/emcconville/wand/issues +Keywords: ImageMagick ctypes +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: Stackless +Classifier: Topic :: Multimedia :: Graphics +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: doc +Requires-Dist: Sphinx (>=5.3.0) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: pytest (>=7.2.0) ; extra == 'test' + +.. image:: https://docs.wand-py.org/en/latest/_static/wand.png + :width: 120 + :height: 120 + +Wand_ +===== + +Wand is a ``ctypes``-based simple ImageMagick_ binding for Python, +supporting 2.7, 3.3+, and PyPy. All functionalities of MagickWand API are +implemented in Wand. + +You can install the package from PyPI_ by using ``pip``: + +.. code-block:: console + + $ pip install Wand + +Or would you like to enjoy the bleeding edge? Check out the head +revision of the source code from the `GitHub repository`__: + +.. code-block:: console + + $ git clone git://github.com/emcconville/wand.git + $ cd wand/ + $ python setup.py install + +.. _Wand: http://wand-py.org/ +.. _ImageMagick: https://www.imagemagick.org/ +.. _PyPI: https://pypi.python.org/pypi/Wand +__ https://github.com/emcconville/wand + + +Docs +---- + +Recent version + https://docs.wand-py.org/ + +Development version + https://docs.wand-py.org/en/latest/ + + .. image:: https://readthedocs.org/projects/wand/badge/ + :alt: Documentation Status + :target: https://docs.wand-py.org/en/latest/ + + +Community +--------- + +Website + http://wand-py.org/ + +GitHub + https://github.com/emcconville/wand + +Package Index (Cheeseshop) + https://pypi.python.org/pypi/Wand + + .. image:: https://badge.fury.io/py/Wand.svg? + :alt: Latest PyPI version + :target: https://pypi.python.org/pypi/Wand + +Discord + https://discord.gg/wtDWDE9fXK + +Stack Overflow tag (Q&A) + http://stackoverflow.com/questions/tagged/wand + +Continuous Integration (Travis CI) + https://app.travis-ci.com/emcconville/wand + + .. image:: https://app.travis-ci.com/emcconville/wand.svg?branch=master + :alt: Build Status + :target: https://app.travis-ci.com/emcconville/wand + +Continuous Integration (GitHub Actions) + https://github.com/emcconville/wand/actions + + .. 
image:: https://github.com/emcconville/wand/workflows/Wand%20CI/badge.svg + :alt: Build Status + :target: https://github.com/emcconville/wand/actions?query=workflow%3A%22Wand+CI%22 + +Code Coverage + https://coveralls.io/r/emcconville/wand + + .. image:: https://coveralls.io/repos/github/emcconville/wand/badge.svg?branch=master + :target: https://coveralls.io/github/emcconville/wand?branch=master diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/RECORD b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/RECORD new file mode 100644 index 0000000..86ab4e2 --- /dev/null +++ b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/RECORD @@ -0,0 +1,53 @@ +Wand-0.6.13.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Wand-0.6.13.dist-info/LICENSE,sha256=LApHI5GF4xKeFcpRi4lLV5DPNhJG7jO9M0B0PLsdr2c,1183 +Wand-0.6.13.dist-info/METADATA,sha256=oeM5ek_GiYCwvpVcIAs4jy1n3xVwMggUNWmXy1wqzA8,3963 +Wand-0.6.13.dist-info/RECORD,, +Wand-0.6.13.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Wand-0.6.13.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110 +Wand-0.6.13.dist-info/top_level.txt,sha256=uFTymN2uxamdZLu2fxZzaBcGwv7WW9v60YcsATzndig,5 +wand/__init__.py,sha256=bEmSKTbdilJXM5PTgsuaqvpliBlmy2of5f77SJMKRh8,202 +wand/__pycache__/__init__.cpython-312.pyc,, +wand/__pycache__/api.cpython-312.pyc,, +wand/__pycache__/assertions.cpython-312.pyc,, +wand/__pycache__/color.cpython-312.pyc,, +wand/__pycache__/compat.cpython-312.pyc,, +wand/__pycache__/display.cpython-312.pyc,, +wand/__pycache__/drawing.cpython-312.pyc,, +wand/__pycache__/exceptions.cpython-312.pyc,, +wand/__pycache__/font.cpython-312.pyc,, +wand/__pycache__/image.cpython-312.pyc,, +wand/__pycache__/resource.cpython-312.pyc,, +wand/__pycache__/sequence.cpython-312.pyc,, +wand/__pycache__/version.cpython-312.pyc,, +wand/api.py,sha256=BXkdzrPqIA0inKkAuuvBFQfjSVKvbX0vAeBLwA1o6ek,10212 +wand/assertions.py,sha256=Ou1l9us4pgxhu-StOivBSMU4zoSKvcjY0WzefQtE0ZE,4721 +wand/cdefs/__init__.py,sha256=YaCYVyNhimXKrD5xWLrAmaMsBAr0QrnVshC7b_vfIPE,126 +wand/cdefs/__pycache__/__init__.cpython-312.pyc,, +wand/cdefs/__pycache__/core.cpython-312.pyc,, +wand/cdefs/__pycache__/drawing_wand.cpython-312.pyc,, +wand/cdefs/__pycache__/magick_image.cpython-312.pyc,, +wand/cdefs/__pycache__/magick_property.cpython-312.pyc,, +wand/cdefs/__pycache__/magick_wand.cpython-312.pyc,, +wand/cdefs/__pycache__/pixel_iterator.cpython-312.pyc,, +wand/cdefs/__pycache__/pixel_wand.cpython-312.pyc,, +wand/cdefs/__pycache__/structures.cpython-312.pyc,, +wand/cdefs/__pycache__/wandtypes.cpython-312.pyc,, +wand/cdefs/core.py,sha256=raahBqwXxAndWUd3a4K_wiwx5hP07Hm0j2SBPSs4Yys,5783 +wand/cdefs/drawing_wand.py,sha256=hDIy9UnNOuCNuUj0nhelHRSTCh3Gd8Td5uu4Ng9_vQk,12343 +wand/cdefs/magick_image.py,sha256=lExHUubi3qo7XB53g-r51xSJTUf1RV5M4fDtvjfxZyE,53527 +wand/cdefs/magick_property.py,sha256=YdE5asyvGnmB2MiXRafGepK5sS5U0tSqWUoRm5osx7Q,8659 +wand/cdefs/magick_wand.py,sha256=cXrruT_7sUfOh7J603bjJDyjgZninUjbbKmkJVRcsME,2477 +wand/cdefs/pixel_iterator.py,sha256=6At0KxkCJ8XDwyud9zmMBViFG1yJqa55_9ob9jxXT2Q,1797 +wand/cdefs/pixel_wand.py,sha256=3ilKZhDkB6uPa1_0ojuHI94mW5iX6Kikv5Js_60WNK4,7553 +wand/cdefs/structures.py,sha256=-1KlazZv0ErnKCdQ8DMpF02AFPIbqbdlI8dlALm0-Jo,6787 +wand/cdefs/wandtypes.py,sha256=0_VgrY2IurGmaRcsPYFKVDPpqekRn4upQFZEXoQqoPw,1400 +wand/color.py,sha256=YaiApbRC1RoUbHd12Q7gtzYjqrnqHXuSlCoB3OjoBhM,24509 +wand/compat.py,sha256=4hYn7AdKfVNxhLiazNXZsePk5HSI2ZYEmcmshVhaplY,4571 
+wand/display.py,sha256=mhlxoWKjGbyfliHAEbeeDdFLsgS0CYsPB91TbHfY258,2450 +wand/drawing.py,sha256=dQUv8hn5iSke6Mijj10G2hvPg0Udzpghx5wexfuTkQU,80107 +wand/exceptions.py,sha256=ZtD_15ij58SYXp7QXMxbXp8291vYH0k5MFQJPflICdU,11165 +wand/font.py,sha256=8auFsXmnLppE6TDvopXHCg430ZK6NkqEGqEkVkaPgsk,4021 +wand/image.py,sha256=5dVN50SufrIwlkmRBzU1eQVSr0OuQDVoZDy7WP2St4A,431410 +wand/resource.py,sha256=NrlAzL4QnyxeQp-uZS1WT4HA1kqf1y9QJ6tluZowHmg,11805 +wand/sequence.py,sha256=ewZnCuR7rOeLQTp5Ix34dxu5huiCEx5GUVjEdgDzDKU,13183 +wand/version.py,sha256=N-YjDddbg9jLv_pSgBomtDGAHMFtz8pDHTWQG9HrS-o,10692 diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/REQUESTED b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/WHEEL b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/WHEEL new file mode 100644 index 0000000..9d8f872 --- /dev/null +++ b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.12/site-packages/Wand-0.6.13.dist-info/top_level.txt b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/top_level.txt new file mode 100644 index 0000000..375e1d6 --- /dev/null +++ b/lib/python3.12/site-packages/Wand-0.6.13.dist-info/top_level.txt @@ -0,0 +1 @@ +wand diff --git a/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/INSTALLER b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/METADATA b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/METADATA new file mode 100644 index 0000000..857ca6a --- /dev/null +++ b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/METADATA @@ -0,0 +1,27 @@ +Metadata-Version: 2.1 +Name: fpdf +Version: 1.7.2 +Summary: Simple PDF generation for Python +Home-page: http://code.google.com/p/pyfpdf +Download-URL: https://github.com/reingart/pyfpdf/tarball/1.7.2 +Author: Olivier PLATHEY ported by Max +Author-email: maxpat78@yahoo.it +Maintainer: Mariano Reingart +Maintainer-email: reingart@gmail.com +License: LGPLv3+ +Keywords: pdf,unicode,png,jpg,ttf +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: PHP Classes +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Multimedia :: Graphics + diff --git a/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/RECORD b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/RECORD new file mode 100644 index 0000000..f7e6c38 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/RECORD @@ -0,0 +1,22 @@ +fpdf-1.7.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+fpdf-1.7.2.dist-info/METADATA,sha256=uC7PCusjTBkhFZeHhnBnD3uhgg2oKFS5xQUayFX2_p0,1146 +fpdf-1.7.2.dist-info/RECORD,, +fpdf-1.7.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fpdf-1.7.2.dist-info/WHEEL,sha256=DZajD4pwLWue70CAfc7YaxT1wLUciNBvN_TTcvXpltE,110 +fpdf-1.7.2.dist-info/top_level.txt,sha256=5LInphvlslEpMUNNFUFlIxUmargkp4k1WqQnoDBAF-w,5 +fpdf/__init__.py,sha256=4IFfKo-doXEmaBsycnxiygWlU_mLBl8qZ_0KWu1axq0,415 +fpdf/__pycache__/__init__.cpython-312.pyc,, +fpdf/__pycache__/fonts.cpython-312.pyc,, +fpdf/__pycache__/fpdf.cpython-312.pyc,, +fpdf/__pycache__/html.cpython-312.pyc,, +fpdf/__pycache__/php.cpython-312.pyc,, +fpdf/__pycache__/py3k.cpython-312.pyc,, +fpdf/__pycache__/template.cpython-312.pyc,, +fpdf/__pycache__/ttfonts.cpython-312.pyc,, +fpdf/fonts.py,sha256=zldged9bndZMJZkg9sXb1nF6vv8i7KhMBrpJMOcKDec,26573 +fpdf/fpdf.py,sha256=qoNd_YEn0mBSQObG8eACOPNXDvTweJlDFfyw6LQi7vU,75624 +fpdf/html.py,sha256=jYqV0oatI7e5pS4VHswH9Ue0fx9RYdn4QiFkG8Fp3X8,14733 +fpdf/php.py,sha256=0UwFm1tR7DSOXH8xeNOAboaskytznL6Dpcp1UyI63yg,1516 +fpdf/py3k.py,sha256=A65E5L989fKEYoEfQDYDI8-hpCQOqALjMBFyspZsvJg,1602 +fpdf/template.py,sha256=8AJ_-oYLBh2ZaIOXQrFtMp9sWyZajgpjSBJDWxqkL2U,9282 +fpdf/ttfonts.py,sha256=dRwaAJF563I5loy5glrw12A9GTSRpD3U-MYfZC4TbJE,40412 diff --git a/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/REQUESTED b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/WHEEL b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/WHEEL new file mode 100644 index 0000000..832be11 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/top_level.txt b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/top_level.txt new file mode 100644 index 0000000..744c7fb --- /dev/null +++ b/lib/python3.12/site-packages/fpdf-1.7.2.dist-info/top_level.txt @@ -0,0 +1 @@ +fpdf diff --git a/lib/python3.12/site-packages/fpdf/__init__.py b/lib/python3.12/site-packages/fpdf/__init__.py new file mode 100644 index 0000000..6c8a431 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"FPDF for python" + +__license__ = "LGPL 3.0" +__version__ = "1.7.2" + +from .fpdf import FPDF, FPDF_FONT_DIR, FPDF_VERSION, SYSTEM_TTFONTS, set_global, FPDF_CACHE_MODE, FPDF_CACHE_DIR +try: + from .html import HTMLMixin +except ImportError: + import warnings + warnings.warn("web2py gluon package not installed, required for html2pdf") + +from .template import Template diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/__init__.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..0311fd7 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/__init__.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/fonts.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/fonts.cpython-312.pyc new file mode 100644 index 0000000..03f5d44 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/fonts.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/fpdf.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/fpdf.cpython-312.pyc new 
file mode 100644 index 0000000..9163be4 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/fpdf.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/html.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/html.cpython-312.pyc new file mode 100644 index 0000000..314acfa Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/html.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/php.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/php.cpython-312.pyc new file mode 100644 index 0000000..8661497 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/php.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/py3k.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/py3k.cpython-312.pyc new file mode 100644 index 0000000..0c54609 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/py3k.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/template.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/template.cpython-312.pyc new file mode 100644 index 0000000..2ce15e6 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/template.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/__pycache__/ttfonts.cpython-312.pyc b/lib/python3.12/site-packages/fpdf/__pycache__/ttfonts.cpython-312.pyc new file mode 100644 index 0000000..e35b6d4 Binary files /dev/null and b/lib/python3.12/site-packages/fpdf/__pycache__/ttfonts.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/fpdf/fonts.py b/lib/python3.12/site-packages/fpdf/fonts.py new file mode 100644 index 0000000..aeb8af6 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/fonts.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python +# -*- coding: latin-1 -*- + +# Fonts: + +fpdf_charwidths = {} + +fpdf_charwidths['courier']={} + +for i in range(0,256): + fpdf_charwidths['courier'][chr(i)]=600 + fpdf_charwidths['courierB']=fpdf_charwidths['courier'] + fpdf_charwidths['courierI']=fpdf_charwidths['courier'] + fpdf_charwidths['courierBI']=fpdf_charwidths['courier'] + +fpdf_charwidths['helvetica']={ + '\x00':278,'\x01':278,'\x02':278,'\x03':278,'\x04':278,'\x05':278,'\x06':278,'\x07':278,'\x08':278,'\t':278,'\n':278,'\x0b':278,'\x0c':278,'\r':278,'\x0e':278,'\x0f':278,'\x10':278,'\x11':278,'\x12':278,'\x13':278,'\x14':278,'\x15':278, + '\x16':278,'\x17':278,'\x18':278,'\x19':278,'\x1a':278,'\x1b':278,'\x1c':278,'\x1d':278,'\x1e':278,'\x1f':278,' ':278,'!':278,'"':355,'#':556,'$':556,'%':889,'&':667,'\'':191,'(':333,')':333,'*':389,'+':584, + ',':278,'-':333,'.':278,'/':278,'0':556,'1':556,'2':556,'3':556,'4':556,'5':556,'6':556,'7':556,'8':556,'9':556,':':278,';':278,'<':584,'=':584,'>':584,'?':556,'@':1015,'A':667, + 'B':667,'C':722,'D':722,'E':667,'F':611,'G':778,'H':722,'I':278,'J':500,'K':667,'L':556,'M':833,'N':722,'O':778,'P':667,'Q':778,'R':722,'S':667,'T':611,'U':722,'V':667,'W':944, + 'X':667,'Y':667,'Z':611,'[':278,'\\':278,']':278,'^':469,'_':556,'`':333,'a':556,'b':556,'c':500,'d':556,'e':556,'f':278,'g':556,'h':556,'i':222,'j':222,'k':500,'l':222,'m':833, + 'n':556,'o':556,'p':556,'q':556,'r':333,'s':500,'t':278,'u':556,'v':500,'w':722,'x':500,'y':500,'z':500,'{':334,'|':260,'}':334,'~':584,'\x7f':350,'\x80':556,'\x81':350,'\x82':222,'\x83':556, + 
'\x84':333,'\x85':1000,'\x86':556,'\x87':556,'\x88':333,'\x89':1000,'\x8a':667,'\x8b':333,'\x8c':1000,'\x8d':350,'\x8e':611,'\x8f':350,'\x90':350,'\x91':222,'\x92':222,'\x93':333,'\x94':333,'\x95':350,'\x96':556,'\x97':1000,'\x98':333,'\x99':1000, + '\x9a':500,'\x9b':333,'\x9c':944,'\x9d':350,'\x9e':500,'\x9f':667,'\xa0':278,'\xa1':333,'\xa2':556,'\xa3':556,'\xa4':556,'\xa5':556,'\xa6':260,'\xa7':556,'\xa8':333,'\xa9':737,'\xaa':370,'\xab':556,'\xac':584,'\xad':333,'\xae':737,'\xaf':333, + '\xb0':400,'\xb1':584,'\xb2':333,'\xb3':333,'\xb4':333,'\xb5':556,'\xb6':537,'\xb7':278,'\xb8':333,'\xb9':333,'\xba':365,'\xbb':556,'\xbc':834,'\xbd':834,'\xbe':834,'\xbf':611,'\xc0':667,'\xc1':667,'\xc2':667,'\xc3':667,'\xc4':667,'\xc5':667, + '\xc6':1000,'\xc7':722,'\xc8':667,'\xc9':667,'\xca':667,'\xcb':667,'\xcc':278,'\xcd':278,'\xce':278,'\xcf':278,'\xd0':722,'\xd1':722,'\xd2':778,'\xd3':778,'\xd4':778,'\xd5':778,'\xd6':778,'\xd7':584,'\xd8':778,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':667,'\xde':667,'\xdf':611,'\xe0':556,'\xe1':556,'\xe2':556,'\xe3':556,'\xe4':556,'\xe5':556,'\xe6':889,'\xe7':500,'\xe8':556,'\xe9':556,'\xea':556,'\xeb':556,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':556,'\xf1':556, + '\xf2':556,'\xf3':556,'\xf4':556,'\xf5':556,'\xf6':556,'\xf7':584,'\xf8':611,'\xf9':556,'\xfa':556,'\xfb':556,'\xfc':556,'\xfd':500,'\xfe':556,'\xff':500} + +fpdf_charwidths['helveticaB']={ + '\x00':278,'\x01':278,'\x02':278,'\x03':278,'\x04':278,'\x05':278,'\x06':278,'\x07':278,'\x08':278,'\t':278,'\n':278,'\x0b':278,'\x0c':278,'\r':278,'\x0e':278,'\x0f':278,'\x10':278,'\x11':278,'\x12':278,'\x13':278,'\x14':278,'\x15':278, + '\x16':278,'\x17':278,'\x18':278,'\x19':278,'\x1a':278,'\x1b':278,'\x1c':278,'\x1d':278,'\x1e':278,'\x1f':278,' ':278,'!':333,'"':474,'#':556,'$':556,'%':889,'&':722,'\'':238,'(':333,')':333,'*':389,'+':584, + ',':278,'-':333,'.':278,'/':278,'0':556,'1':556,'2':556,'3':556,'4':556,'5':556,'6':556,'7':556,'8':556,'9':556,':':333,';':333,'<':584,'=':584,'>':584,'?':611,'@':975,'A':722, + 'B':722,'C':722,'D':722,'E':667,'F':611,'G':778,'H':722,'I':278,'J':556,'K':722,'L':611,'M':833,'N':722,'O':778,'P':667,'Q':778,'R':722,'S':667,'T':611,'U':722,'V':667,'W':944, + 'X':667,'Y':667,'Z':611,'[':333,'\\':278,']':333,'^':584,'_':556,'`':333,'a':556,'b':611,'c':556,'d':611,'e':556,'f':333,'g':611,'h':611,'i':278,'j':278,'k':556,'l':278,'m':889, + 'n':611,'o':611,'p':611,'q':611,'r':389,'s':556,'t':333,'u':611,'v':556,'w':778,'x':556,'y':556,'z':500,'{':389,'|':280,'}':389,'~':584,'\x7f':350,'\x80':556,'\x81':350,'\x82':278,'\x83':556, + '\x84':500,'\x85':1000,'\x86':556,'\x87':556,'\x88':333,'\x89':1000,'\x8a':667,'\x8b':333,'\x8c':1000,'\x8d':350,'\x8e':611,'\x8f':350,'\x90':350,'\x91':278,'\x92':278,'\x93':500,'\x94':500,'\x95':350,'\x96':556,'\x97':1000,'\x98':333,'\x99':1000, + '\x9a':556,'\x9b':333,'\x9c':944,'\x9d':350,'\x9e':500,'\x9f':667,'\xa0':278,'\xa1':333,'\xa2':556,'\xa3':556,'\xa4':556,'\xa5':556,'\xa6':280,'\xa7':556,'\xa8':333,'\xa9':737,'\xaa':370,'\xab':556,'\xac':584,'\xad':333,'\xae':737,'\xaf':333, + '\xb0':400,'\xb1':584,'\xb2':333,'\xb3':333,'\xb4':333,'\xb5':611,'\xb6':556,'\xb7':278,'\xb8':333,'\xb9':333,'\xba':365,'\xbb':556,'\xbc':834,'\xbd':834,'\xbe':834,'\xbf':611,'\xc0':722,'\xc1':722,'\xc2':722,'\xc3':722,'\xc4':722,'\xc5':722, + 
'\xc6':1000,'\xc7':722,'\xc8':667,'\xc9':667,'\xca':667,'\xcb':667,'\xcc':278,'\xcd':278,'\xce':278,'\xcf':278,'\xd0':722,'\xd1':722,'\xd2':778,'\xd3':778,'\xd4':778,'\xd5':778,'\xd6':778,'\xd7':584,'\xd8':778,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':667,'\xde':667,'\xdf':611,'\xe0':556,'\xe1':556,'\xe2':556,'\xe3':556,'\xe4':556,'\xe5':556,'\xe6':889,'\xe7':556,'\xe8':556,'\xe9':556,'\xea':556,'\xeb':556,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':611,'\xf1':611, + '\xf2':611,'\xf3':611,'\xf4':611,'\xf5':611,'\xf6':611,'\xf7':584,'\xf8':611,'\xf9':611,'\xfa':611,'\xfb':611,'\xfc':611,'\xfd':556,'\xfe':611,'\xff':556 +} + +fpdf_charwidths['helveticaBI']={ + '\x00':278,'\x01':278,'\x02':278,'\x03':278,'\x04':278,'\x05':278,'\x06':278,'\x07':278,'\x08':278,'\t':278,'\n':278,'\x0b':278,'\x0c':278,'\r':278,'\x0e':278,'\x0f':278,'\x10':278,'\x11':278,'\x12':278,'\x13':278,'\x14':278,'\x15':278, + '\x16':278,'\x17':278,'\x18':278,'\x19':278,'\x1a':278,'\x1b':278,'\x1c':278,'\x1d':278,'\x1e':278,'\x1f':278,' ':278,'!':333,'"':474,'#':556,'$':556,'%':889,'&':722,'\'':238,'(':333,')':333,'*':389,'+':584, + ',':278,'-':333,'.':278,'/':278,'0':556,'1':556,'2':556,'3':556,'4':556,'5':556,'6':556,'7':556,'8':556,'9':556,':':333,';':333,'<':584,'=':584,'>':584,'?':611,'@':975,'A':722, + 'B':722,'C':722,'D':722,'E':667,'F':611,'G':778,'H':722,'I':278,'J':556,'K':722,'L':611,'M':833,'N':722,'O':778,'P':667,'Q':778,'R':722,'S':667,'T':611,'U':722,'V':667,'W':944, + 'X':667,'Y':667,'Z':611,'[':333,'\\':278,']':333,'^':584,'_':556,'`':333,'a':556,'b':611,'c':556,'d':611,'e':556,'f':333,'g':611,'h':611,'i':278,'j':278,'k':556,'l':278,'m':889, + 'n':611,'o':611,'p':611,'q':611,'r':389,'s':556,'t':333,'u':611,'v':556,'w':778,'x':556,'y':556,'z':500,'{':389,'|':280,'}':389,'~':584,'\x7f':350,'\x80':556,'\x81':350,'\x82':278,'\x83':556, + '\x84':500,'\x85':1000,'\x86':556,'\x87':556,'\x88':333,'\x89':1000,'\x8a':667,'\x8b':333,'\x8c':1000,'\x8d':350,'\x8e':611,'\x8f':350,'\x90':350,'\x91':278,'\x92':278,'\x93':500,'\x94':500,'\x95':350,'\x96':556,'\x97':1000,'\x98':333,'\x99':1000, + '\x9a':556,'\x9b':333,'\x9c':944,'\x9d':350,'\x9e':500,'\x9f':667,'\xa0':278,'\xa1':333,'\xa2':556,'\xa3':556,'\xa4':556,'\xa5':556,'\xa6':280,'\xa7':556,'\xa8':333,'\xa9':737,'\xaa':370,'\xab':556,'\xac':584,'\xad':333,'\xae':737,'\xaf':333, + '\xb0':400,'\xb1':584,'\xb2':333,'\xb3':333,'\xb4':333,'\xb5':611,'\xb6':556,'\xb7':278,'\xb8':333,'\xb9':333,'\xba':365,'\xbb':556,'\xbc':834,'\xbd':834,'\xbe':834,'\xbf':611,'\xc0':722,'\xc1':722,'\xc2':722,'\xc3':722,'\xc4':722,'\xc5':722, + '\xc6':1000,'\xc7':722,'\xc8':667,'\xc9':667,'\xca':667,'\xcb':667,'\xcc':278,'\xcd':278,'\xce':278,'\xcf':278,'\xd0':722,'\xd1':722,'\xd2':778,'\xd3':778,'\xd4':778,'\xd5':778,'\xd6':778,'\xd7':584,'\xd8':778,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':667,'\xde':667,'\xdf':611,'\xe0':556,'\xe1':556,'\xe2':556,'\xe3':556,'\xe4':556,'\xe5':556,'\xe6':889,'\xe7':556,'\xe8':556,'\xe9':556,'\xea':556,'\xeb':556,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':611,'\xf1':611, + '\xf2':611,'\xf3':611,'\xf4':611,'\xf5':611,'\xf6':611,'\xf7':584,'\xf8':611,'\xf9':611,'\xfa':611,'\xfb':611,'\xfc':611,'\xfd':556,'\xfe':611,'\xff':556} + +fpdf_charwidths['helveticaI']={ + '\x00':278,'\x01':278,'\x02':278,'\x03':278,'\x04':278,'\x05':278,'\x06':278,'\x07':278,'\x08':278,'\t':278,'\n':278,'\x0b':278,'\x0c':278,'\r':278,'\x0e':278,'\x0f':278,'\x10':278,'\x11':278,'\x12':278,'\x13':278,'\x14':278,'\x15':278, + 
'\x16':278,'\x17':278,'\x18':278,'\x19':278,'\x1a':278,'\x1b':278,'\x1c':278,'\x1d':278,'\x1e':278,'\x1f':278,' ':278,'!':278,'"':355,'#':556,'$':556,'%':889,'&':667,'\'':191,'(':333,')':333,'*':389,'+':584, + ',':278,'-':333,'.':278,'/':278,'0':556,'1':556,'2':556,'3':556,'4':556,'5':556,'6':556,'7':556,'8':556,'9':556,':':278,';':278,'<':584,'=':584,'>':584,'?':556,'@':1015,'A':667, + 'B':667,'C':722,'D':722,'E':667,'F':611,'G':778,'H':722,'I':278,'J':500,'K':667,'L':556,'M':833,'N':722,'O':778,'P':667,'Q':778,'R':722,'S':667,'T':611,'U':722,'V':667,'W':944, + 'X':667,'Y':667,'Z':611,'[':278,'\\':278,']':278,'^':469,'_':556,'`':333,'a':556,'b':556,'c':500,'d':556,'e':556,'f':278,'g':556,'h':556,'i':222,'j':222,'k':500,'l':222,'m':833, + 'n':556,'o':556,'p':556,'q':556,'r':333,'s':500,'t':278,'u':556,'v':500,'w':722,'x':500,'y':500,'z':500,'{':334,'|':260,'}':334,'~':584,'\x7f':350,'\x80':556,'\x81':350,'\x82':222,'\x83':556, + '\x84':333,'\x85':1000,'\x86':556,'\x87':556,'\x88':333,'\x89':1000,'\x8a':667,'\x8b':333,'\x8c':1000,'\x8d':350,'\x8e':611,'\x8f':350,'\x90':350,'\x91':222,'\x92':222,'\x93':333,'\x94':333,'\x95':350,'\x96':556,'\x97':1000,'\x98':333,'\x99':1000, + '\x9a':500,'\x9b':333,'\x9c':944,'\x9d':350,'\x9e':500,'\x9f':667,'\xa0':278,'\xa1':333,'\xa2':556,'\xa3':556,'\xa4':556,'\xa5':556,'\xa6':260,'\xa7':556,'\xa8':333,'\xa9':737,'\xaa':370,'\xab':556,'\xac':584,'\xad':333,'\xae':737,'\xaf':333, + '\xb0':400,'\xb1':584,'\xb2':333,'\xb3':333,'\xb4':333,'\xb5':556,'\xb6':537,'\xb7':278,'\xb8':333,'\xb9':333,'\xba':365,'\xbb':556,'\xbc':834,'\xbd':834,'\xbe':834,'\xbf':611,'\xc0':667,'\xc1':667,'\xc2':667,'\xc3':667,'\xc4':667,'\xc5':667, + '\xc6':1000,'\xc7':722,'\xc8':667,'\xc9':667,'\xca':667,'\xcb':667,'\xcc':278,'\xcd':278,'\xce':278,'\xcf':278,'\xd0':722,'\xd1':722,'\xd2':778,'\xd3':778,'\xd4':778,'\xd5':778,'\xd6':778,'\xd7':584,'\xd8':778,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':667,'\xde':667,'\xdf':611,'\xe0':556,'\xe1':556,'\xe2':556,'\xe3':556,'\xe4':556,'\xe5':556,'\xe6':889,'\xe7':500,'\xe8':556,'\xe9':556,'\xea':556,'\xeb':556,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':556,'\xf1':556, + '\xf2':556,'\xf3':556,'\xf4':556,'\xf5':556,'\xf6':556,'\xf7':584,'\xf8':611,'\xf9':556,'\xfa':556,'\xfb':556,'\xfc':556,'\xfd':500,'\xfe':556,'\xff':500} + +fpdf_charwidths['symbol']={ + '\x00':250,'\x01':250,'\x02':250,'\x03':250,'\x04':250,'\x05':250,'\x06':250,'\x07':250,'\x08':250,'\t':250,'\n':250,'\x0b':250,'\x0c':250,'\r':250,'\x0e':250,'\x0f':250,'\x10':250,'\x11':250,'\x12':250,'\x13':250,'\x14':250,'\x15':250, + '\x16':250,'\x17':250,'\x18':250,'\x19':250,'\x1a':250,'\x1b':250,'\x1c':250,'\x1d':250,'\x1e':250,'\x1f':250,' ':250,'!':333,'"':713,'#':500,'$':549,'%':833,'&':778,'\'':439,'(':333,')':333,'*':500,'+':549, + ',':250,'-':549,'.':250,'/':278,'0':500,'1':500,'2':500,'3':500,'4':500,'5':500,'6':500,'7':500,'8':500,'9':500,':':278,';':278,'<':549,'=':549,'>':549,'?':444,'@':549,'A':722, + 'B':667,'C':722,'D':612,'E':611,'F':763,'G':603,'H':722,'I':333,'J':631,'K':722,'L':686,'M':889,'N':722,'O':722,'P':768,'Q':741,'R':556,'S':592,'T':611,'U':690,'V':439,'W':768, + 'X':645,'Y':795,'Z':611,'[':333,'\\':863,']':333,'^':658,'_':500,'`':500,'a':631,'b':549,'c':549,'d':494,'e':439,'f':521,'g':411,'h':603,'i':329,'j':603,'k':549,'l':549,'m':576, + 'n':521,'o':549,'p':549,'q':521,'r':549,'s':603,'t':439,'u':576,'v':713,'w':686,'x':493,'y':686,'z':494,'{':480,'|':200,'}':480,'~':549,'\x7f':0,'\x80':0,'\x81':0,'\x82':0,'\x83':0, + 
'\x84':0,'\x85':0,'\x86':0,'\x87':0,'\x88':0,'\x89':0,'\x8a':0,'\x8b':0,'\x8c':0,'\x8d':0,'\x8e':0,'\x8f':0,'\x90':0,'\x91':0,'\x92':0,'\x93':0,'\x94':0,'\x95':0,'\x96':0,'\x97':0,'\x98':0,'\x99':0, + '\x9a':0,'\x9b':0,'\x9c':0,'\x9d':0,'\x9e':0,'\x9f':0,'\xa0':750,'\xa1':620,'\xa2':247,'\xa3':549,'\xa4':167,'\xa5':713,'\xa6':500,'\xa7':753,'\xa8':753,'\xa9':753,'\xaa':753,'\xab':1042,'\xac':987,'\xad':603,'\xae':987,'\xaf':603, + '\xb0':400,'\xb1':549,'\xb2':411,'\xb3':549,'\xb4':549,'\xb5':713,'\xb6':494,'\xb7':460,'\xb8':549,'\xb9':549,'\xba':549,'\xbb':549,'\xbc':1000,'\xbd':603,'\xbe':1000,'\xbf':658,'\xc0':823,'\xc1':686,'\xc2':795,'\xc3':987,'\xc4':768,'\xc5':768, + '\xc6':823,'\xc7':768,'\xc8':768,'\xc9':713,'\xca':713,'\xcb':713,'\xcc':713,'\xcd':713,'\xce':713,'\xcf':713,'\xd0':768,'\xd1':713,'\xd2':790,'\xd3':790,'\xd4':890,'\xd5':823,'\xd6':549,'\xd7':250,'\xd8':713,'\xd9':603,'\xda':603,'\xdb':1042, + '\xdc':987,'\xdd':603,'\xde':987,'\xdf':603,'\xe0':494,'\xe1':329,'\xe2':790,'\xe3':790,'\xe4':786,'\xe5':713,'\xe6':384,'\xe7':384,'\xe8':384,'\xe9':384,'\xea':384,'\xeb':384,'\xec':494,'\xed':494,'\xee':494,'\xef':494,'\xf0':0,'\xf1':329, + '\xf2':274,'\xf3':686,'\xf4':686,'\xf5':686,'\xf6':384,'\xf7':384,'\xf8':384,'\xf9':384,'\xfa':384,'\xfb':384,'\xfc':494,'\xfd':494,'\xfe':494,'\xff':0} + +fpdf_charwidths['times']={ + '\x00':250,'\x01':250,'\x02':250,'\x03':250,'\x04':250,'\x05':250,'\x06':250,'\x07':250,'\x08':250,'\t':250,'\n':250,'\x0b':250,'\x0c':250,'\r':250,'\x0e':250,'\x0f':250,'\x10':250,'\x11':250,'\x12':250,'\x13':250,'\x14':250,'\x15':250, + '\x16':250,'\x17':250,'\x18':250,'\x19':250,'\x1a':250,'\x1b':250,'\x1c':250,'\x1d':250,'\x1e':250,'\x1f':250,' ':250,'!':333,'"':408,'#':500,'$':500,'%':833,'&':778,'\'':180,'(':333,')':333,'*':500,'+':564, + ',':250,'-':333,'.':250,'/':278,'0':500,'1':500,'2':500,'3':500,'4':500,'5':500,'6':500,'7':500,'8':500,'9':500,':':278,';':278,'<':564,'=':564,'>':564,'?':444,'@':921,'A':722, + 'B':667,'C':667,'D':722,'E':611,'F':556,'G':722,'H':722,'I':333,'J':389,'K':722,'L':611,'M':889,'N':722,'O':722,'P':556,'Q':722,'R':667,'S':556,'T':611,'U':722,'V':722,'W':944, + 'X':722,'Y':722,'Z':611,'[':333,'\\':278,']':333,'^':469,'_':500,'`':333,'a':444,'b':500,'c':444,'d':500,'e':444,'f':333,'g':500,'h':500,'i':278,'j':278,'k':500,'l':278,'m':778, + 'n':500,'o':500,'p':500,'q':500,'r':333,'s':389,'t':278,'u':500,'v':500,'w':722,'x':500,'y':500,'z':444,'{':480,'|':200,'}':480,'~':541,'\x7f':350,'\x80':500,'\x81':350,'\x82':333,'\x83':500, + '\x84':444,'\x85':1000,'\x86':500,'\x87':500,'\x88':333,'\x89':1000,'\x8a':556,'\x8b':333,'\x8c':889,'\x8d':350,'\x8e':611,'\x8f':350,'\x90':350,'\x91':333,'\x92':333,'\x93':444,'\x94':444,'\x95':350,'\x96':500,'\x97':1000,'\x98':333,'\x99':980, + '\x9a':389,'\x9b':333,'\x9c':722,'\x9d':350,'\x9e':444,'\x9f':722,'\xa0':250,'\xa1':333,'\xa2':500,'\xa3':500,'\xa4':500,'\xa5':500,'\xa6':200,'\xa7':500,'\xa8':333,'\xa9':760,'\xaa':276,'\xab':500,'\xac':564,'\xad':333,'\xae':760,'\xaf':333, + '\xb0':400,'\xb1':564,'\xb2':300,'\xb3':300,'\xb4':333,'\xb5':500,'\xb6':453,'\xb7':250,'\xb8':333,'\xb9':300,'\xba':310,'\xbb':500,'\xbc':750,'\xbd':750,'\xbe':750,'\xbf':444,'\xc0':722,'\xc1':722,'\xc2':722,'\xc3':722,'\xc4':722,'\xc5':722, + '\xc6':889,'\xc7':667,'\xc8':611,'\xc9':611,'\xca':611,'\xcb':611,'\xcc':333,'\xcd':333,'\xce':333,'\xcf':333,'\xd0':722,'\xd1':722,'\xd2':722,'\xd3':722,'\xd4':722,'\xd5':722,'\xd6':722,'\xd7':564,'\xd8':722,'\xd9':722,'\xda':722,'\xdb':722, + 
'\xdc':722,'\xdd':722,'\xde':556,'\xdf':500,'\xe0':444,'\xe1':444,'\xe2':444,'\xe3':444,'\xe4':444,'\xe5':444,'\xe6':667,'\xe7':444,'\xe8':444,'\xe9':444,'\xea':444,'\xeb':444,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':500,'\xf1':500, + '\xf2':500,'\xf3':500,'\xf4':500,'\xf5':500,'\xf6':500,'\xf7':564,'\xf8':500,'\xf9':500,'\xfa':500,'\xfb':500,'\xfc':500,'\xfd':500,'\xfe':500,'\xff':500} + +fpdf_charwidths['timesB']={ + '\x00':250,'\x01':250,'\x02':250,'\x03':250,'\x04':250,'\x05':250,'\x06':250,'\x07':250,'\x08':250,'\t':250,'\n':250,'\x0b':250,'\x0c':250,'\r':250,'\x0e':250,'\x0f':250,'\x10':250,'\x11':250,'\x12':250,'\x13':250,'\x14':250,'\x15':250, + '\x16':250,'\x17':250,'\x18':250,'\x19':250,'\x1a':250,'\x1b':250,'\x1c':250,'\x1d':250,'\x1e':250,'\x1f':250,' ':250,'!':333,'"':555,'#':500,'$':500,'%':1000,'&':833,'\'':278,'(':333,')':333,'*':500,'+':570, + ',':250,'-':333,'.':250,'/':278,'0':500,'1':500,'2':500,'3':500,'4':500,'5':500,'6':500,'7':500,'8':500,'9':500,':':333,';':333,'<':570,'=':570,'>':570,'?':500,'@':930,'A':722, + 'B':667,'C':722,'D':722,'E':667,'F':611,'G':778,'H':778,'I':389,'J':500,'K':778,'L':667,'M':944,'N':722,'O':778,'P':611,'Q':778,'R':722,'S':556,'T':667,'U':722,'V':722,'W':1000, + 'X':722,'Y':722,'Z':667,'[':333,'\\':278,']':333,'^':581,'_':500,'`':333,'a':500,'b':556,'c':444,'d':556,'e':444,'f':333,'g':500,'h':556,'i':278,'j':333,'k':556,'l':278,'m':833, + 'n':556,'o':500,'p':556,'q':556,'r':444,'s':389,'t':333,'u':556,'v':500,'w':722,'x':500,'y':500,'z':444,'{':394,'|':220,'}':394,'~':520,'\x7f':350,'\x80':500,'\x81':350,'\x82':333,'\x83':500, + '\x84':500,'\x85':1000,'\x86':500,'\x87':500,'\x88':333,'\x89':1000,'\x8a':556,'\x8b':333,'\x8c':1000,'\x8d':350,'\x8e':667,'\x8f':350,'\x90':350,'\x91':333,'\x92':333,'\x93':500,'\x94':500,'\x95':350,'\x96':500,'\x97':1000,'\x98':333,'\x99':1000, + '\x9a':389,'\x9b':333,'\x9c':722,'\x9d':350,'\x9e':444,'\x9f':722,'\xa0':250,'\xa1':333,'\xa2':500,'\xa3':500,'\xa4':500,'\xa5':500,'\xa6':220,'\xa7':500,'\xa8':333,'\xa9':747,'\xaa':300,'\xab':500,'\xac':570,'\xad':333,'\xae':747,'\xaf':333, + '\xb0':400,'\xb1':570,'\xb2':300,'\xb3':300,'\xb4':333,'\xb5':556,'\xb6':540,'\xb7':250,'\xb8':333,'\xb9':300,'\xba':330,'\xbb':500,'\xbc':750,'\xbd':750,'\xbe':750,'\xbf':500,'\xc0':722,'\xc1':722,'\xc2':722,'\xc3':722,'\xc4':722,'\xc5':722, + '\xc6':1000,'\xc7':722,'\xc8':667,'\xc9':667,'\xca':667,'\xcb':667,'\xcc':389,'\xcd':389,'\xce':389,'\xcf':389,'\xd0':722,'\xd1':722,'\xd2':778,'\xd3':778,'\xd4':778,'\xd5':778,'\xd6':778,'\xd7':570,'\xd8':778,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':722,'\xde':611,'\xdf':556,'\xe0':500,'\xe1':500,'\xe2':500,'\xe3':500,'\xe4':500,'\xe5':500,'\xe6':722,'\xe7':444,'\xe8':444,'\xe9':444,'\xea':444,'\xeb':444,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':500,'\xf1':556, + '\xf2':500,'\xf3':500,'\xf4':500,'\xf5':500,'\xf6':500,'\xf7':570,'\xf8':500,'\xf9':556,'\xfa':556,'\xfb':556,'\xfc':556,'\xfd':500,'\xfe':556,'\xff':500} + +fpdf_charwidths['timesBI']={ + '\x00':250,'\x01':250,'\x02':250,'\x03':250,'\x04':250,'\x05':250,'\x06':250,'\x07':250,'\x08':250,'\t':250,'\n':250,'\x0b':250,'\x0c':250,'\r':250,'\x0e':250,'\x0f':250,'\x10':250,'\x11':250,'\x12':250,'\x13':250,'\x14':250,'\x15':250, + '\x16':250,'\x17':250,'\x18':250,'\x19':250,'\x1a':250,'\x1b':250,'\x1c':250,'\x1d':250,'\x1e':250,'\x1f':250,' ':250,'!':389,'"':555,'#':500,'$':500,'%':833,'&':778,'\'':278,'(':333,')':333,'*':500,'+':570, + 
',':250,'-':333,'.':250,'/':278,'0':500,'1':500,'2':500,'3':500,'4':500,'5':500,'6':500,'7':500,'8':500,'9':500,':':333,';':333,'<':570,'=':570,'>':570,'?':500,'@':832,'A':667, + 'B':667,'C':667,'D':722,'E':667,'F':667,'G':722,'H':778,'I':389,'J':500,'K':667,'L':611,'M':889,'N':722,'O':722,'P':611,'Q':722,'R':667,'S':556,'T':611,'U':722,'V':667,'W':889, + 'X':667,'Y':611,'Z':611,'[':333,'\\':278,']':333,'^':570,'_':500,'`':333,'a':500,'b':500,'c':444,'d':500,'e':444,'f':333,'g':500,'h':556,'i':278,'j':278,'k':500,'l':278,'m':778, + 'n':556,'o':500,'p':500,'q':500,'r':389,'s':389,'t':278,'u':556,'v':444,'w':667,'x':500,'y':444,'z':389,'{':348,'|':220,'}':348,'~':570,'\x7f':350,'\x80':500,'\x81':350,'\x82':333,'\x83':500, + '\x84':500,'\x85':1000,'\x86':500,'\x87':500,'\x88':333,'\x89':1000,'\x8a':556,'\x8b':333,'\x8c':944,'\x8d':350,'\x8e':611,'\x8f':350,'\x90':350,'\x91':333,'\x92':333,'\x93':500,'\x94':500,'\x95':350,'\x96':500,'\x97':1000,'\x98':333,'\x99':1000, + '\x9a':389,'\x9b':333,'\x9c':722,'\x9d':350,'\x9e':389,'\x9f':611,'\xa0':250,'\xa1':389,'\xa2':500,'\xa3':500,'\xa4':500,'\xa5':500,'\xa6':220,'\xa7':500,'\xa8':333,'\xa9':747,'\xaa':266,'\xab':500,'\xac':606,'\xad':333,'\xae':747,'\xaf':333, + '\xb0':400,'\xb1':570,'\xb2':300,'\xb3':300,'\xb4':333,'\xb5':576,'\xb6':500,'\xb7':250,'\xb8':333,'\xb9':300,'\xba':300,'\xbb':500,'\xbc':750,'\xbd':750,'\xbe':750,'\xbf':500,'\xc0':667,'\xc1':667,'\xc2':667,'\xc3':667,'\xc4':667,'\xc5':667, + '\xc6':944,'\xc7':667,'\xc8':667,'\xc9':667,'\xca':667,'\xcb':667,'\xcc':389,'\xcd':389,'\xce':389,'\xcf':389,'\xd0':722,'\xd1':722,'\xd2':722,'\xd3':722,'\xd4':722,'\xd5':722,'\xd6':722,'\xd7':570,'\xd8':722,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':611,'\xde':611,'\xdf':500,'\xe0':500,'\xe1':500,'\xe2':500,'\xe3':500,'\xe4':500,'\xe5':500,'\xe6':722,'\xe7':444,'\xe8':444,'\xe9':444,'\xea':444,'\xeb':444,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':500,'\xf1':556, + '\xf2':500,'\xf3':500,'\xf4':500,'\xf5':500,'\xf6':500,'\xf7':570,'\xf8':500,'\xf9':556,'\xfa':556,'\xfb':556,'\xfc':556,'\xfd':444,'\xfe':500,'\xff':444} + +fpdf_charwidths['timesI']={ + '\x00':250,'\x01':250,'\x02':250,'\x03':250,'\x04':250,'\x05':250,'\x06':250,'\x07':250,'\x08':250,'\t':250,'\n':250,'\x0b':250,'\x0c':250,'\r':250,'\x0e':250,'\x0f':250,'\x10':250,'\x11':250,'\x12':250,'\x13':250,'\x14':250,'\x15':250, + '\x16':250,'\x17':250,'\x18':250,'\x19':250,'\x1a':250,'\x1b':250,'\x1c':250,'\x1d':250,'\x1e':250,'\x1f':250,' ':250,'!':333,'"':420,'#':500,'$':500,'%':833,'&':778,'\'':214,'(':333,')':333,'*':500,'+':675, + ',':250,'-':333,'.':250,'/':278,'0':500,'1':500,'2':500,'3':500,'4':500,'5':500,'6':500,'7':500,'8':500,'9':500,':':333,';':333,'<':675,'=':675,'>':675,'?':500,'@':920,'A':611, + 'B':611,'C':667,'D':722,'E':611,'F':611,'G':722,'H':722,'I':333,'J':444,'K':667,'L':556,'M':833,'N':667,'O':722,'P':611,'Q':722,'R':611,'S':500,'T':556,'U':722,'V':611,'W':833, + 'X':611,'Y':556,'Z':556,'[':389,'\\':278,']':389,'^':422,'_':500,'`':333,'a':500,'b':500,'c':444,'d':500,'e':444,'f':278,'g':500,'h':500,'i':278,'j':278,'k':444,'l':278,'m':722, + 'n':500,'o':500,'p':500,'q':500,'r':389,'s':389,'t':278,'u':500,'v':444,'w':667,'x':444,'y':444,'z':389,'{':400,'|':275,'}':400,'~':541,'\x7f':350,'\x80':500,'\x81':350,'\x82':333,'\x83':500, + 
'\x84':556,'\x85':889,'\x86':500,'\x87':500,'\x88':333,'\x89':1000,'\x8a':500,'\x8b':333,'\x8c':944,'\x8d':350,'\x8e':556,'\x8f':350,'\x90':350,'\x91':333,'\x92':333,'\x93':556,'\x94':556,'\x95':350,'\x96':500,'\x97':889,'\x98':333,'\x99':980, + '\x9a':389,'\x9b':333,'\x9c':667,'\x9d':350,'\x9e':389,'\x9f':556,'\xa0':250,'\xa1':389,'\xa2':500,'\xa3':500,'\xa4':500,'\xa5':500,'\xa6':275,'\xa7':500,'\xa8':333,'\xa9':760,'\xaa':276,'\xab':500,'\xac':675,'\xad':333,'\xae':760,'\xaf':333, + '\xb0':400,'\xb1':675,'\xb2':300,'\xb3':300,'\xb4':333,'\xb5':500,'\xb6':523,'\xb7':250,'\xb8':333,'\xb9':300,'\xba':310,'\xbb':500,'\xbc':750,'\xbd':750,'\xbe':750,'\xbf':500,'\xc0':611,'\xc1':611,'\xc2':611,'\xc3':611,'\xc4':611,'\xc5':611, + '\xc6':889,'\xc7':667,'\xc8':611,'\xc9':611,'\xca':611,'\xcb':611,'\xcc':333,'\xcd':333,'\xce':333,'\xcf':333,'\xd0':722,'\xd1':667,'\xd2':722,'\xd3':722,'\xd4':722,'\xd5':722,'\xd6':722,'\xd7':675,'\xd8':722,'\xd9':722,'\xda':722,'\xdb':722, + '\xdc':722,'\xdd':556,'\xde':611,'\xdf':500,'\xe0':500,'\xe1':500,'\xe2':500,'\xe3':500,'\xe4':500,'\xe5':500,'\xe6':667,'\xe7':444,'\xe8':444,'\xe9':444,'\xea':444,'\xeb':444,'\xec':278,'\xed':278,'\xee':278,'\xef':278,'\xf0':500,'\xf1':500, + '\xf2':500,'\xf3':500,'\xf4':500,'\xf5':500,'\xf6':500,'\xf7':675,'\xf8':500,'\xf9':500,'\xfa':500,'\xfb':500,'\xfc':500,'\xfd':444,'\xfe':500,'\xff':444} + +fpdf_charwidths['zapfdingbats']={ + '\x00':0,'\x01':0,'\x02':0,'\x03':0,'\x04':0,'\x05':0,'\x06':0,'\x07':0,'\x08':0,'\t':0,'\n':0,'\x0b':0,'\x0c':0,'\r':0,'\x0e':0,'\x0f':0,'\x10':0,'\x11':0,'\x12':0,'\x13':0,'\x14':0,'\x15':0, + '\x16':0,'\x17':0,'\x18':0,'\x19':0,'\x1a':0,'\x1b':0,'\x1c':0,'\x1d':0,'\x1e':0,'\x1f':0,' ':278,'!':974,'"':961,'#':974,'$':980,'%':719,'&':789,'\'':790,'(':791,')':690,'*':960,'+':939, + ',':549,'-':855,'.':911,'/':933,'0':911,'1':945,'2':974,'3':755,'4':846,'5':762,'6':761,'7':571,'8':677,'9':763,':':760,';':759,'<':754,'=':494,'>':552,'?':537,'@':577,'A':692, + 'B':786,'C':788,'D':788,'E':790,'F':793,'G':794,'H':816,'I':823,'J':789,'K':841,'L':823,'M':833,'N':816,'O':831,'P':923,'Q':744,'R':723,'S':749,'T':790,'U':792,'V':695,'W':776, + 'X':768,'Y':792,'Z':759,'[':707,'\\':708,']':682,'^':701,'_':826,'`':815,'a':789,'b':789,'c':707,'d':687,'e':696,'f':689,'g':786,'h':787,'i':713,'j':791,'k':785,'l':791,'m':873, + 'n':761,'o':762,'p':762,'q':759,'r':759,'s':892,'t':892,'u':788,'v':784,'w':438,'x':138,'y':277,'z':415,'{':392,'|':392,'}':668,'~':668,'\x7f':0,'\x80':390,'\x81':390,'\x82':317,'\x83':317, + '\x84':276,'\x85':276,'\x86':509,'\x87':509,'\x88':410,'\x89':410,'\x8a':234,'\x8b':234,'\x8c':334,'\x8d':334,'\x8e':0,'\x8f':0,'\x90':0,'\x91':0,'\x92':0,'\x93':0,'\x94':0,'\x95':0,'\x96':0,'\x97':0,'\x98':0,'\x99':0, + '\x9a':0,'\x9b':0,'\x9c':0,'\x9d':0,'\x9e':0,'\x9f':0,'\xa0':0,'\xa1':732,'\xa2':544,'\xa3':544,'\xa4':910,'\xa5':667,'\xa6':760,'\xa7':760,'\xa8':776,'\xa9':595,'\xaa':694,'\xab':626,'\xac':788,'\xad':788,'\xae':788,'\xaf':788, + '\xb0':788,'\xb1':788,'\xb2':788,'\xb3':788,'\xb4':788,'\xb5':788,'\xb6':788,'\xb7':788,'\xb8':788,'\xb9':788,'\xba':788,'\xbb':788,'\xbc':788,'\xbd':788,'\xbe':788,'\xbf':788,'\xc0':788,'\xc1':788,'\xc2':788,'\xc3':788,'\xc4':788,'\xc5':788, + '\xc6':788,'\xc7':788,'\xc8':788,'\xc9':788,'\xca':788,'\xcb':788,'\xcc':788,'\xcd':788,'\xce':788,'\xcf':788,'\xd0':788,'\xd1':788,'\xd2':788,'\xd3':788,'\xd4':894,'\xd5':838,'\xd6':1016,'\xd7':458,'\xd8':748,'\xd9':924,'\xda':748,'\xdb':918, + 
'\xdc':927,'\xdd':928,'\xde':928,'\xdf':834,'\xe0':873,'\xe1':828,'\xe2':924,'\xe3':924,'\xe4':917,'\xe5':930,'\xe6':931,'\xe7':463,'\xe8':883,'\xe9':836,'\xea':836,'\xeb':867,'\xec':867,'\xed':696,'\xee':696,'\xef':874,'\xf0':0,'\xf1':874, + '\xf2':760,'\xf3':946,'\xf4':771,'\xf5':865,'\xf6':771,'\xf7':888,'\xf8':967,'\xf9':888,'\xfa':831,'\xfb':873,'\xfc':927,'\xfd':970,'\xfe':918,'\xff':0} + diff --git a/lib/python3.12/site-packages/fpdf/fpdf.py b/lib/python3.12/site-packages/fpdf/fpdf.py new file mode 100644 index 0000000..6ae1e38 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/fpdf.py @@ -0,0 +1,2000 @@ +#!/usr/bin/env python +# -*- coding: latin-1 -*- +# **************************************************************************** +# * Software: FPDF for python * +# * Version: 1.7.1 * +# * Date: 2010-09-10 * +# * Last update: 2012-08-16 * +# * License: LGPL v3.0 * +# * * +# * Original Author (PHP): Olivier PLATHEY 2004-12-31 * +# * Ported to Python 2.4 by Max (maxpat78@yahoo.it) on 2006-05 * +# * Maintainer: Mariano Reingart (reingart@gmail.com) et al since 2008 est. * +# * NOTE: 'I' and 'D' destinations are disabled, and simply print to STDOUT * +# **************************************************************************** + +from __future__ import division + +from datetime import datetime +from functools import wraps +import math +import errno +import os, sys, zlib, struct, re, tempfile, struct + +from .ttfonts import TTFontFile +from .fonts import fpdf_charwidths +from .php import substr, sprintf, print_r, UTF8ToUTF16BE, UTF8StringToArray +from .py3k import PY3K, pickle, urlopen, Image, basestring, unicode, exception, b, hashpath + +# Global variables +FPDF_VERSION = '1.7.2' +FPDF_FONT_DIR = os.path.join(os.path.dirname(__file__),'font') +FPDF_CACHE_MODE = 0 # 0 - in same foder, 1 - none, 2 - hash +FPDF_CACHE_DIR = None +SYSTEM_TTFONTS = None + + +def set_global(var, val): + globals()[var] = val + + +class FPDF(object): + "PDF Generation class" + + def __init__(self, orientation='P',unit='mm',format='A4'): + # Some checks + self._dochecks() + # Initialization of properties + self.offsets={} # array of object offsets + self.page=0 # current page number + self.n=2 # current object number + self.buffer='' # buffer holding in-memory PDF + self.pages={} # array containing pages + self.orientation_changes={} # array indicating orientation changes + self.state=0 # current document state + self.fonts={} # array of used fonts + self.font_files={} # array of font files + self.diffs={} # array of encoding differences + self.images={} # array of used images + self.page_links={} # array of links in pages + self.links={} # array of internal links + self.in_footer=0 # flag set when processing footer + self.lastw=0 + self.lasth=0 # height of last cell printed + self.font_family='' # current font family + self.font_style='' # current font style + self.font_size_pt=12 # current font size in points + self.underline=0 # underlining flag + self.draw_color='0 G' + self.fill_color='0 g' + self.text_color='0 g' + self.color_flag=0 # indicates whether fill and text colors are different + self.ws=0 # word spacing + self.angle=0 + # Standard fonts + self.core_fonts={'courier':'Courier','courierB':'Courier-Bold','courierI':'Courier-Oblique','courierBI':'Courier-BoldOblique', + 'helvetica':'Helvetica','helveticaB':'Helvetica-Bold','helveticaI':'Helvetica-Oblique','helveticaBI':'Helvetica-BoldOblique', + 'times':'Times-Roman','timesB':'Times-Bold','timesI':'Times-Italic','timesBI':'Times-BoldItalic', + 
'symbol':'Symbol','zapfdingbats':'ZapfDingbats'} + # Scale factor + if(unit=='pt'): + self.k=1 + elif(unit=='mm'): + self.k=72/25.4 + elif(unit=='cm'): + self.k=72/2.54 + elif(unit=='in'): + self.k=72. + else: + self.error('Incorrect unit: '+unit) + # Page format + if(isinstance(format,basestring)): + format=format.lower() + if(format=='a3'): + format=(841.89,1190.55) + elif(format=='a4'): + format=(595.28,841.89) + elif(format=='a5'): + format=(420.94,595.28) + elif(format=='letter'): + format=(612,792) + elif(format=='legal'): + format=(612,1008) + else: + self.error('Unknown page format: '+format) + self.fw_pt=format[0] + self.fh_pt=format[1] + else: + self.fw_pt=format[0]*self.k + self.fh_pt=format[1]*self.k + self.fw=self.fw_pt/self.k + self.fh=self.fh_pt/self.k + # Page orientation + orientation=orientation.lower() + if(orientation=='p' or orientation=='portrait'): + self.def_orientation='P' + self.w_pt=self.fw_pt + self.h_pt=self.fh_pt + elif(orientation=='l' or orientation=='landscape'): + self.def_orientation='L' + self.w_pt=self.fh_pt + self.h_pt=self.fw_pt + else: + self.error('Incorrect orientation: '+orientation) + self.cur_orientation=self.def_orientation + self.w=self.w_pt/self.k + self.h=self.h_pt/self.k + # Page margins (1 cm) + margin=28.35/self.k + self.set_margins(margin,margin) + # Interior cell margin (1 mm) + self.c_margin=margin/10.0 + # line width (0.2 mm) + self.line_width=.567/self.k + # Automatic page break + self.set_auto_page_break(1,2*margin) + # Full width display mode + self.set_display_mode('fullwidth') + # Enable compression + self.set_compression(1) + # Set default PDF version number + self.pdf_version='1.3' + + def check_page(fn): + "Decorator to protect drawing methods" + @wraps(fn) + def wrapper(self, *args, **kwargs): + if not self.page and not kwargs.get('split_only'): + self.error("No page open, you need to call add_page() first") + else: + return fn(self, *args, **kwargs) + return wrapper + + def set_margins(self, left,top,right=-1): + "Set left, top and right margins" + self.l_margin=left + self.t_margin=top + if(right==-1): + right=left + self.r_margin=right + + def set_left_margin(self, margin): + "Set left margin" + self.l_margin=margin + if(self.page>0 and self.x0): + #Page footer + self.in_footer=1 + self.footer() + self.in_footer=0 + #close page + self._endpage() + #Start new page + self._beginpage(orientation) + #Set line cap style to square + self._out('2 J') + #Set line width + self.line_width=lw + self._out(sprintf('%.2f w',lw*self.k)) + #Set font + if(family): + self.set_font(family,style,size) + #Set colors + self.draw_color=dc + if(dc!='0 G'): + self._out(dc) + self.fill_color=fc + if(fc!='0 g'): + self._out(fc) + self.text_color=tc + self.color_flag=cf + #Page header + self.header() + #Restore line width + if(self.line_width!=lw): + self.line_width=lw + self._out(sprintf('%.2f w',lw*self.k)) + #Restore font + if(family): + self.set_font(family,style,size) + #Restore colors + if(self.draw_color!=dc): + self.draw_color=dc + self._out(dc) + if(self.fill_color!=fc): + self.fill_color=fc + self._out(fc) + self.text_color=tc + self.color_flag=cf + + def header(self): + "Header to be implemented in your own inherited class" + pass + + def footer(self): + "Footer to be implemented in your own inherited class" + pass + + def page_no(self): + "Get current page number" + return self.page + + def set_draw_color(self, r,g=-1,b=-1): + "Set color for all stroking operations" + if((r==0 and g==0 and b==0) or g==-1): + self.draw_color=sprintf('%.3f 
G',r/255.0) + else: + self.draw_color=sprintf('%.3f %.3f %.3f RG',r/255.0,g/255.0,b/255.0) + if(self.page>0): + self._out(self.draw_color) + + def set_fill_color(self,r,g=-1,b=-1): + "Set color for all filling operations" + if((r==0 and g==0 and b==0) or g==-1): + self.fill_color=sprintf('%.3f g',r/255.0) + else: + self.fill_color=sprintf('%.3f %.3f %.3f rg',r/255.0,g/255.0,b/255.0) + self.color_flag=(self.fill_color!=self.text_color) + if(self.page>0): + self._out(self.fill_color) + + def set_text_color(self, r,g=-1,b=-1): + "Set color for text" + if((r==0 and g==0 and b==0) or g==-1): + self.text_color=sprintf('%.3f g',r/255.0) + else: + self.text_color=sprintf('%.3f %.3f %.3f rg',r/255.0,g/255.0,b/255.0) + self.color_flag=(self.fill_color!=self.text_color) + + def get_string_width(self, s): + "Get width of a string in the current font" + s = self.normalize_text(s) + cw=self.current_font['cw'] + w=0 + l=len(s) + if self.unifontsubset: + for char in s: + char = ord(char) + if len(cw) > char: + w += cw[char] # ord(cw[2*char])<<8 + ord(cw[2*char+1]) + #elif (char>0 and char<128 and isset($cw[chr($char)])) { $w += $cw[chr($char)]; } + elif (self.current_font['desc']['MissingWidth']) : + w += self.current_font['desc']['MissingWidth'] + #elif (isset($this->CurrentFont['MissingWidth'])) { $w += $this->CurrentFont['MissingWidth']; } + else: + w += 500 + else: + for i in range(0, l): + w += cw.get(s[i],0) + return w*self.font_size/1000.0 + + def set_line_width(self, width): + "Set line width" + self.line_width=width + if(self.page>0): + self._out(sprintf('%.2f w',width*self.k)) + + @check_page + def line(self, x1,y1,x2,y2): + "Draw a line" + self._out(sprintf('%.2f %.2f m %.2f %.2f l S',x1*self.k,(self.h-y1)*self.k,x2*self.k,(self.h-y2)*self.k)) + + def _set_dash(self, dash_length=False, space_length=False): + if(dash_length and space_length): + s = sprintf('[%.3f %.3f] 0 d', dash_length*self.k, space_length*self.k) + else: + s = '[] 0 d' + self._out(s) + + @check_page + def dashed_line(self, x1,y1,x2,y2, dash_length=1, space_length=1): + """Draw a dashed line. 
Same interface as line() except: + - dash_length: Length of the dash + - space_length: Length of the space between dashes""" + self._set_dash(dash_length, space_length) + self.line(x1, y1, x2, y2) + self._set_dash() + + @check_page + def rect(self, x,y,w,h,style=''): + "Draw a rectangle" + if(style=='F'): + op='f' + elif(style=='FD' or style=='DF'): + op='B' + else: + op='S' + self._out(sprintf('%.2f %.2f %.2f %.2f re %s',x*self.k,(self.h-y)*self.k,w*self.k,-h*self.k,op)) + + @check_page + def ellipse(self, x,y,w,h,style=''): + "Draw a ellipse" + if(style=='F'): + op='f' + elif(style=='FD' or style=='DF'): + op='B' + else: + op='S' + + cx = x + w/2.0 + cy = y + h/2.0 + rx = w/2.0 + ry = h/2.0 + + lx = 4.0/3.0*(math.sqrt(2)-1)*rx + ly = 4.0/3.0*(math.sqrt(2)-1)*ry + + self._out(sprintf('%.2f %.2f m %.2f %.2f %.2f %.2f %.2f %.2f c', + (cx+rx)*self.k, (self.h-cy)*self.k, + (cx+rx)*self.k, (self.h-(cy-ly))*self.k, + (cx+lx)*self.k, (self.h-(cy-ry))*self.k, + cx*self.k, (self.h-(cy-ry))*self.k)) + self._out(sprintf('%.2f %.2f %.2f %.2f %.2f %.2f c', + (cx-lx)*self.k, (self.h-(cy-ry))*self.k, + (cx-rx)*self.k, (self.h-(cy-ly))*self.k, + (cx-rx)*self.k, (self.h-cy)*self.k)) + self._out(sprintf('%.2f %.2f %.2f %.2f %.2f %.2f c', + (cx-rx)*self.k, (self.h-(cy+ly))*self.k, + (cx-lx)*self.k, (self.h-(cy+ry))*self.k, + cx*self.k, (self.h-(cy+ry))*self.k)) + self._out(sprintf('%.2f %.2f %.2f %.2f %.2f %.2f c %s', + (cx+lx)*self.k, (self.h-(cy+ry))*self.k, + (cx+rx)*self.k, (self.h-(cy+ly))*self.k, + (cx+rx)*self.k, (self.h-cy)*self.k, + op)) + + def add_font(self, family, style='', fname='', uni=False): + "Add a TrueType or Type1 font" + family = family.lower() + if (fname == ''): + fname = family.replace(' ','') + style.lower() + '.pkl' + if (family == 'arial'): + family = 'helvetica' + style = style.upper() + if (style == 'IB'): + style = 'BI' + fontkey = family+style + if fontkey in self.fonts: + # Font already added! 
+ return + if (uni): + global SYSTEM_TTFONTS, FPDF_CACHE_MODE, FPDF_CACHE_DIR + if os.path.exists(fname): + ttffilename = fname + elif (FPDF_FONT_DIR and + os.path.exists(os.path.join(FPDF_FONT_DIR, fname))): + ttffilename = os.path.join(FPDF_FONT_DIR, fname) + elif (SYSTEM_TTFONTS and + os.path.exists(os.path.join(SYSTEM_TTFONTS, fname))): + ttffilename = os.path.join(SYSTEM_TTFONTS, fname) + else: + raise RuntimeError("TTF Font file not found: %s" % fname) + name = '' + if FPDF_CACHE_MODE == 0: + unifilename = os.path.splitext(ttffilename)[0] + '.pkl' + elif FPDF_CACHE_MODE == 2: + unifilename = os.path.join(FPDF_CACHE_DIR, \ + hashpath(ttffilename) + ".pkl") + else: + unifilename = None + if unifilename and os.path.exists(unifilename): + fh = open(unifilename, "rb") + try: + font_dict = pickle.load(fh) + finally: + fh.close() + else: + ttf = TTFontFile() + ttf.getMetrics(ttffilename) + desc = { + 'Ascent': int(round(ttf.ascent, 0)), + 'Descent': int(round(ttf.descent, 0)), + 'CapHeight': int(round(ttf.capHeight, 0)), + 'Flags': ttf.flags, + 'FontBBox': "[%s %s %s %s]" % ( + int(round(ttf.bbox[0], 0)), + int(round(ttf.bbox[1], 0)), + int(round(ttf.bbox[2], 0)), + int(round(ttf.bbox[3], 0))), + 'ItalicAngle': int(ttf.italicAngle), + 'StemV': int(round(ttf.stemV, 0)), + 'MissingWidth': int(round(ttf.defaultWidth, 0)), + } + # Generate metrics .pkl file + font_dict = { + 'name': re.sub('[ ()]', '', ttf.fullName), + 'type': 'TTF', + 'desc': desc, + 'up': round(ttf.underlinePosition), + 'ut': round(ttf.underlineThickness), + 'ttffile': ttffilename, + 'fontkey': fontkey, + 'originalsize': os.stat(ttffilename).st_size, + 'cw': ttf.charWidths, + } + if unifilename: + try: + fh = open(unifilename, "wb") + pickle.dump(font_dict, fh) + fh.close() + except IOError: + if not exception().errno == errno.EACCES: + raise # Not a permission error. + del ttf + if hasattr(self,'str_alias_nb_pages'): + sbarr = list(range(0,57)) # include numbers in the subset! 
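+            # note: the str_alias_nb_pages placeholder is only replaced with the real page count later, in _putpages, after subsetting, so the digit glyphs have to be seeded into the subset here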
+ else: + sbarr = list(range(0,32)) + self.fonts[fontkey] = { + 'i': len(self.fonts)+1, 'type': font_dict['type'], + 'name': font_dict['name'], 'desc': font_dict['desc'], + 'up': font_dict['up'], 'ut': font_dict['ut'], + 'cw': font_dict['cw'], + 'ttffile': font_dict['ttffile'], 'fontkey': fontkey, + 'subset': sbarr, 'unifilename': unifilename, + } + self.font_files[fontkey] = {'length1': font_dict['originalsize'], + 'type': "TTF", 'ttffile': ttffilename} + self.font_files[fname] = {'type': "TTF"} + else: + fontfile = open(fname) + try: + font_dict = pickle.load(fontfile) + finally: + fontfile.close() + self.fonts[fontkey] = {'i': len(self.fonts)+1} + self.fonts[fontkey].update(font_dict) + if (diff): + #Search existing encodings + d = 0 + nb = len(self.diffs) + for i in range(1, nb+1): + if(self.diffs[i] == diff): + d = i + break + if (d == 0): + d = nb + 1 + self.diffs[d] = diff + self.fonts[fontkey]['diff'] = d + filename = font_dict.get('filename') + if (filename): + if (type == 'TrueType'): + self.font_files[filename]={'length1': originalsize} + else: + self.font_files[filename]={'length1': size1, + 'length2': size2} + + def set_font(self, family,style='',size=0): + "Select a font; size given in points" + family=family.lower() + if(family==''): + family=self.font_family + if(family=='arial'): + family='helvetica' + elif(family=='symbol' or family=='zapfdingbats'): + style='' + style=style.upper() + if('U' in style): + self.underline=1 + style=style.replace('U','') + else: + self.underline=0 + if(style=='IB'): + style='BI' + if(size==0): + size=self.font_size_pt + #Test if font is already selected + if(self.font_family==family and self.font_style==style and self.font_size_pt==size): + return + #Test if used for the first time + fontkey=family+style + if fontkey not in self.fonts: + #Check if one of the standard fonts + if fontkey in self.core_fonts: + if fontkey not in fpdf_charwidths: + #Load metric file + name=os.path.join(FPDF_FONT_DIR,family) + if(family=='times' or family=='helvetica'): + name+=style.lower() + exec(compile(open(name+'.font').read(), name+'.font', 'exec')) + if fontkey not in fpdf_charwidths: + self.error('Could not include font metric file for'+fontkey) + i=len(self.fonts)+1 + self.fonts[fontkey]={'i':i,'type':'core','name':self.core_fonts[fontkey],'up':-100,'ut':50,'cw':fpdf_charwidths[fontkey]} + else: + self.error('Undefined font: '+family+' '+style) + #Select it + self.font_family=family + self.font_style=style + self.font_size_pt=size + self.font_size=size/self.k + self.current_font=self.fonts[fontkey] + self.unifontsubset = (self.fonts[fontkey]['type'] == 'TTF') + if(self.page>0): + self._out(sprintf('BT /F%d %.2f Tf ET',self.current_font['i'],self.font_size_pt)) + + def set_font_size(self, size): + "Set font size in points" + if(self.font_size_pt==size): + return + self.font_size_pt=size + self.font_size=size/self.k + if(self.page>0): + self._out(sprintf('BT /F%d %.2f Tf ET',self.current_font['i'],self.font_size_pt)) + + def add_link(self): + "Create a new internal link" + n=len(self.links)+1 + self.links[n]=(0,0) + return n + + def set_link(self, link,y=0,page=-1): + "Set destination of internal link" + if(y==-1): + y=self.y + if(page==-1): + page=self.page + self.links[link]=[page,y] + + def link(self, x,y,w,h,link): + "Put a link on the page" + if not self.page in self.page_links: + self.page_links[self.page] = [] + self.page_links[self.page] += [(x*self.k,self.h_pt-y*self.k,w*self.k,h*self.k,link),] + + @check_page + def text(self, x, y, txt=''): + 
"Output a string" + txt = self.normalize_text(txt) + if (self.unifontsubset): + txt2 = self._escape(UTF8ToUTF16BE(txt, False)) + for uni in UTF8StringToArray(txt): + self.current_font['subset'].append(uni) + else: + txt2 = self._escape(txt) + s=sprintf('BT %.2f %.2f Td (%s) Tj ET',x*self.k,(self.h-y)*self.k, txt2) + if(self.underline and txt!=''): + s+=' '+self._dounderline(x,y,txt) + if(self.color_flag): + s='q '+self.text_color+' '+s+' Q' + self._out(s) + + @check_page + def rotate(self, angle, x=None, y=None): + if x is None: + x = self.x + if y is None: + y = self.y; + if self.angle!=0: + self._out('Q') + self.angle = angle + if angle!=0: + angle *= math.pi/180; + c = math.cos(angle); + s = math.sin(angle); + cx = x*self.k; + cy = (self.h-y)*self.k + s = sprintf('q %.5F %.5F %.5F %.5F %.2F %.2F cm 1 0 0 1 %.2F %.2F cm',c,s,-s,c,cx,cy,-cx,-cy) + self._out(s) + + def accept_page_break(self): + "Accept automatic page break or not" + return self.auto_page_break + + @check_page + def cell(self, w,h=0,txt='',border=0,ln=0,align='',fill=0,link=''): + "Output a cell" + txt = self.normalize_text(txt) + k=self.k + if(self.y+h>self.page_break_trigger and not self.in_footer and self.accept_page_break()): + #Automatic page break + x=self.x + ws=self.ws + if(ws>0): + self.ws=0 + self._out('0 Tw') + self.add_page(self.cur_orientation) + self.x=x + if(ws>0): + self.ws=ws + self._out(sprintf('%.3f Tw',ws*k)) + if(w==0): + w=self.w-self.r_margin-self.x + s='' + if(fill==1 or border==1): + if(fill==1): + if border==1: + op='B' + else: + op='f' + else: + op='S' + s=sprintf('%.2f %.2f %.2f %.2f re %s ',self.x*k,(self.h-self.y)*k,w*k,-h*k,op) + if(isinstance(border,basestring)): + x=self.x + y=self.y + if('L' in border): + s+=sprintf('%.2f %.2f m %.2f %.2f l S ',x*k,(self.h-y)*k,x*k,(self.h-(y+h))*k) + if('T' in border): + s+=sprintf('%.2f %.2f m %.2f %.2f l S ',x*k,(self.h-y)*k,(x+w)*k,(self.h-y)*k) + if('R' in border): + s+=sprintf('%.2f %.2f m %.2f %.2f l S ',(x+w)*k,(self.h-y)*k,(x+w)*k,(self.h-(y+h))*k) + if('B' in border): + s+=sprintf('%.2f %.2f m %.2f %.2f l S ',x*k,(self.h-(y+h))*k,(x+w)*k,(self.h-(y+h))*k) + if(txt!=''): + if(align=='R'): + dx=w-self.c_margin-self.get_string_width(txt) + elif(align=='C'): + dx=(w-self.get_string_width(txt))/2.0 + else: + dx=self.c_margin + if(self.color_flag): + s+='q '+self.text_color+' ' + + # If multibyte, Tw has no effect - do word spacing using an adjustment before each space + if (self.ws and self.unifontsubset): + for uni in UTF8StringToArray(txt): + self.current_font['subset'].append(uni) + space = self._escape(UTF8ToUTF16BE(' ', False)) + s += sprintf('BT 0 Tw %.2F %.2F Td [',(self.x + dx) * k,(self.h - (self.y + 0.5*h+ 0.3 * self.font_size)) * k) + t = txt.split(' ') + numt = len(t) + for i in range(numt): + tx = t[i] + tx = '(' + self._escape(UTF8ToUTF16BE(tx, False)) + ')' + s += sprintf('%s ', tx); + if ((i+1)0): + #Go to next line + self.y+=h + if(ln==1): + self.x=self.l_margin + else: + self.x+=w + + @check_page + def multi_cell(self, w, h, txt='', border=0, align='J', fill=0, split_only=False): + "Output text with automatic or explicit line breaks" + txt = self.normalize_text(txt) + ret = [] # if split_only = True, returns splited text cells + cw=self.current_font['cw'] + if(w==0): + w=self.w-self.r_margin-self.x + wmax=(w-2*self.c_margin)*1000.0/self.font_size + s=txt.replace("\r",'') + nb=len(s) + if(nb>0 and s[nb-1]=="\n"): + nb-=1 + b=0 + if(border): + if(border==1): + border='LTRB' + b='LRT' + b2='LR' + else: + b2='' + if('L' in border): + 
b2+='L' + if('R' in border): + b2+='R' + if ('T' in border): + b=b2+'T' + else: + b=b2 + sep=-1 + i=0 + j=0 + l=0 + ns=0 + nl=1 + while(i0): + self.ws=0 + if not split_only: + self._out('0 Tw') + if not split_only: + self.cell(w,h,substr(s,j,i-j),b,2,align,fill) + else: + ret.append(substr(s,j,i-j)) + i+=1 + sep=-1 + j=i + l=0 + ns=0 + nl+=1 + if(border and nl==2): + b=b2 + continue + if(c==' '): + sep=i + ls=l + ns+=1 + if self.unifontsubset: + l += self.get_string_width(c) / self.font_size*1000.0 + else: + l += cw.get(c,0) + if(l>wmax): + #Automatic line break + if(sep==-1): + if(i==j): + i+=1 + if(self.ws>0): + self.ws=0 + if not split_only: + self._out('0 Tw') + if not split_only: + self.cell(w,h,substr(s,j,i-j),b,2,align,fill) + else: + ret.append(substr(s,j,i-j)) + else: + if(align=='J'): + if ns>1: + self.ws=(wmax-ls)/1000.0*self.font_size/(ns-1) + else: + self.ws=0 + if not split_only: + self._out(sprintf('%.3f Tw',self.ws*self.k)) + if not split_only: + self.cell(w,h,substr(s,j,sep-j),b,2,align,fill) + else: + ret.append(substr(s,j,sep-j)) + i=sep+1 + sep=-1 + j=i + l=0 + ns=0 + nl+=1 + if(border and nl==2): + b=b2 + else: + i+=1 + #Last chunk + if(self.ws>0): + self.ws=0 + if not split_only: + self._out('0 Tw') + if(border and 'B' in border): + b+='B' + if not split_only: + self.cell(w,h,substr(s,j,i-j),b,2,align,fill) + self.x=self.l_margin + else: + ret.append(substr(s,j,i-j)) + return ret + + @check_page + def write(self, h, txt='', link=''): + "Output text in flowing mode" + txt = self.normalize_text(txt) + cw=self.current_font['cw'] + w=self.w-self.r_margin-self.x + wmax=(w-2*self.c_margin)*1000.0/self.font_size + s=txt.replace("\r",'') + nb=len(s) + sep=-1 + i=0 + j=0 + l=0 + nl=1 + while(iwmax): + #Automatic line break + if(sep==-1): + if(self.x>self.l_margin): + #Move to next line + self.x=self.l_margin + self.y+=h + w=self.w-self.r_margin-self.x + wmax=(w-2*self.c_margin)*1000.0/self.font_size + i+=1 + nl+=1 + continue + if(i==j): + i+=1 + self.cell(w,h,substr(s,j,i-j),0,2,'',0,link) + else: + self.cell(w,h,substr(s,j,sep-j),0,2,'',0,link) + i=sep+1 + sep=-1 + j=i + l=0 + if(nl==1): + self.x=self.l_margin + w=self.w-self.r_margin-self.x + wmax=(w-2*self.c_margin)*1000.0/self.font_size + nl+=1 + else: + i+=1 + #Last chunk + if(i!=j): + self.cell(l/1000.0*self.font_size,h,substr(s,j),0,0,'',0,link) + + @check_page + def image(self, name, x=None, y=None, w=0,h=0,type='',link=''): + "Put an image on the page" + if not name in self.images: + #First use of image, get info + if(type==''): + pos=name.rfind('.') + if(not pos): + self.error('image file has no extension and no type was specified: '+name) + type=substr(name,pos+1) + type=type.lower() + if(type=='jpg' or type=='jpeg'): + info=self._parsejpg(name) + elif(type=='png'): + info=self._parsepng(name) + else: + #Allow for additional formats + #maybe the image is not showing the correct extension, + #but the header is OK, + succeed_parsing = False + #try all the parsing functions + parsing_functions = [self._parsejpg,self._parsepng,self._parsegif] + for pf in parsing_functions: + try: + info = pf(name) + succeed_parsing = True + break; + except: + pass + #last resource + if not succeed_parsing: + mtd='_parse'+type + if not hasattr(self,mtd): + self.error('Unsupported image type: '+type) + info=getattr(self, mtd)(name) + mtd='_parse'+type + if not hasattr(self,mtd): + self.error('Unsupported image type: '+type) + info=getattr(self, mtd)(name) + info['i']=len(self.images)+1 + self.images[name]=info + else: + 
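+            # image already registered by an earlier call: reuse the cached info instead of parsing the file again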
info=self.images[name] + #Automatic width and height calculation if needed + if(w==0 and h==0): + #Put image at 72 dpi + w=info['w']/self.k + h=info['h']/self.k + elif(w==0): + w=h*info['w']/info['h'] + elif(h==0): + h=w*info['h']/info['w'] + # Flowing mode + if y is None: + if (self.y + h > self.page_break_trigger and not self.in_footer and self.accept_page_break()): + #Automatic page break + x = self.x + self.add_page(self.cur_orientation) + self.x = x + y = self.y + self.y += h + if x is None: + x = self.x + self._out(sprintf('q %.2f 0 0 %.2f %.2f %.2f cm /I%d Do Q',w*self.k,h*self.k,x*self.k,(self.h-(y+h))*self.k,info['i'])) + if(link): + self.link(x,y,w,h,link) + + @check_page + def ln(self, h=''): + "Line Feed; default value is last cell height" + self.x=self.l_margin + if(isinstance(h, basestring)): + self.y+=self.lasth + else: + self.y+=h + + def get_x(self): + "Get x position" + return self.x + + def set_x(self, x): + "Set x position" + if(x>=0): + self.x=x + else: + self.x=self.w+x + + def get_y(self): + "Get y position" + return self.y + + def set_y(self, y): + "Set y position and reset x" + self.x=self.l_margin + if(y>=0): + self.y=y + else: + self.y=self.h+y + + def set_xy(self, x,y): + "Set x and y positions" + self.set_y(y) + self.set_x(x) + + def output(self, name='',dest=''): + "Output PDF to some destination" + #Finish document if necessary + if(self.state<3): + self.close() + dest=dest.upper() + if(dest==''): + if(name==''): + name='doc.pdf' + dest='I' + else: + dest='F' + if dest=='I': + print(self.buffer) + elif dest=='D': + print(self.buffer) + elif dest=='F': + #Save to local file + f=open(name,'wb') + if(not f): + self.error('Unable to create output file: '+name) + if PY3K: + # manage binary data as latin1 until PEP461 or similar is implemented + f.write(self.buffer.encode("latin1")) + else: + f.write(self.buffer) + f.close() + elif dest=='S': + #Return as a string + return self.buffer + else: + self.error('Incorrect output destination: '+dest) + return '' + + def normalize_text(self, txt): + "Check that text input is in the correct format/encoding" + # - for TTF unicode fonts: unicode object (utf8 encoding) + # - for built-in fonts: string instances (latin 1 encoding) + if self.unifontsubset and isinstance(txt, str) and not PY3K: + txt = txt.decode('utf8') + elif not self.unifontsubset and isinstance(txt, unicode) and not PY3K: + txt = txt.encode('latin1') + return txt + + + def _dochecks(self): + #Check for locale-related bug +# if(1.1==1): +# self.error("Don\'t alter the locale before including class file"); + #Check for decimal separator + if(sprintf('%.1f',1.0)!='1.0'): + import locale + locale.setlocale(locale.LC_NUMERIC,'C') + + def _getfontpath(self): + return FPDF_FONT_DIR+'/' + + def _putpages(self): + nb=self.page + if hasattr(self,'str_alias_nb_pages'): + # Replace number of pages in fonts using subsets (unicode) + alias = UTF8ToUTF16BE(self.str_alias_nb_pages, False) + r = UTF8ToUTF16BE(str(nb), False) + for n in range(1, nb+1): + self.pages[n] = self.pages[n].replace(alias, r) + # Now repeat for no pages in non-subset fonts + for n in range(1,nb+1): + self.pages[n]=self.pages[n].replace(self.str_alias_nb_pages,str(nb)) + if(self.def_orientation=='P'): + w_pt=self.fw_pt + h_pt=self.fh_pt + else: + w_pt=self.fh_pt + h_pt=self.fw_pt + if self.compress: + filter='/Filter /FlateDecode ' + else: + filter='' + for n in range(1,nb+1): + #Page + self._newobj() + self._out('<>>>' + else: + l=self.links[pl[4]] + if l[0] in self.orientation_changes: + h=w_pt + 
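+                        # target page has a changed orientation, so its height in points equals the default page width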
else: + h=h_pt + annots+=sprintf('/Dest [%d 0 R /XYZ 0 %.2f null]>>',1+2*l[0],h-l[1]*self.k) + self._out(annots+']') + if(self.pdf_version>'1.3'): + self._out('/Group <>') + self._out('/Contents '+str(self.n+1)+' 0 R>>') + self._out('endobj') + #Page content + if self.compress: + # manage binary data as latin1 until PEP461 or similar is implemented + p = self.pages[n].encode("latin1") if PY3K else self.pages[n] + p = zlib.compress(p) + else: + p = self.pages[n] + self._newobj() + self._out('<<'+filter+'/Length '+str(len(p))+'>>') + self._putstream(p) + self._out('endobj') + #Pages root + self.offsets[1]=len(self.buffer) + self._out('1 0 obj') + self._out('<>') + self._out('endobj') + + def _putfonts(self): + nf=self.n + for diff in self.diffs: + #Encodings + self._newobj() + self._out('<>') + self._out('endobj') + for name,info in self.font_files.items(): + if 'type' in info and info['type'] != 'TTF': + #Font file embedding + self._newobj() + self.font_files[name]['n']=self.n + font='' + f=open(self._getfontpath()+name,'rb',1) + if(not f): + self.error('Font file not found') + font=f.read() + f.close() + compressed=(substr(name,-2)=='.z') + if(not compressed and 'length2' in info): + header=(ord(font[0])==128) + if(header): + #Strip first binary header + font=substr(font,6) + if(header and ord(font[info['length1']])==128): + #Strip second binary header + font=substr(font,0,info['length1'])+substr(font,info['length1']+6) + self._out('<>') + self._putstream(font) + self._out('endobj') + flist = [(x[1]["i"],x[0],x[1]) for x in self.fonts.items()] + flist.sort() + for idx,k,font in flist: + #Font objects + self.fonts[k]['n']=self.n+1 + type=font['type'] + name=font['name'] + if(type=='core'): + #Standard font + self._newobj() + self._out('<>') + self._out('endobj') + elif(type=='Type1' or type=='TrueType'): + #Additional Type1 or TrueType font + self._newobj() + self._out('<>') + self._out('endobj') + #Widths + self._newobj() + cw=font['cw'] + s='[' + for i in range(32,256): + # Get doesn't rise exception; returns 0 instead of None if not set + s+=str(cw.get(chr(i)) or 0)+' ' + self._out(s+']') + self._out('endobj') + #Descriptor + self._newobj() + s='<>') + self._out('endobj') + elif (type == 'TTF'): + self.fonts[k]['n'] = self.n + 1 + ttf = TTFontFile() + fontname = 'MPDFAA' + '+' + font['name'] + subset = font['subset'] + del subset[0] + ttfontstream = ttf.makeSubset(font['ttffile'], subset) + ttfontsize = len(ttfontstream) + fontstream = zlib.compress(ttfontstream) + codeToGlyph = ttf.codeToGlyph + ##del codeToGlyph[0] + # Type0 Font + # A composite font - a font composed of other fonts, organized hierarchically + self._newobj() + self._out('<>') + self._out('endobj') + + # CIDFontType2 + # A CIDFont whose glyph descriptions are based on TrueType font technology + self._newobj() + self._out('<>') + self._out('endobj') + + # ToUnicode + self._newobj() + toUni = "/CIDInit /ProcSet findresource begin\n" \ + "12 dict begin\n" \ + "begincmap\n" \ + "/CIDSystemInfo\n" \ + "<> def\n" \ + "/CMapName /Adobe-Identity-UCS def\n" \ + "/CMapType 2 def\n" \ + "1 begincodespacerange\n" \ + "<0000> \n" \ + "endcodespacerange\n" \ + "1 beginbfrange\n" \ + "<0000> <0000>\n" \ + "endbfrange\n" \ + "endcmap\n" \ + "CMapName currentdict /CMap defineresource pop\n" \ + "end\n" \ + "end" + self._out('<>') + self._putstream(toUni) + self._out('endobj') + + # CIDSystemInfo dictionary + self._newobj() + self._out('<>') + self._out('endobj') + + # Font descriptor + self._newobj() + self._out('<>') + 
self._out('endobj') + + # Embed CIDToGIDMap + # A specification of the mapping from CIDs to glyph indices + cidtogidmap = ''; + cidtogidmap = ["\x00"] * 256*256*2 + for cc, glyph in codeToGlyph.items(): + cidtogidmap[cc*2] = chr(glyph >> 8) + cidtogidmap[cc*2 + 1] = chr(glyph & 0xFF) + cidtogidmap = ''.join(cidtogidmap) + if PY3K: + # manage binary data as latin1 until PEP461-like function is implemented + cidtogidmap = cidtogidmap.encode("latin1") + cidtogidmap = zlib.compress(cidtogidmap); + self._newobj() + self._out('<>') + self._putstream(cidtogidmap) + self._out('endobj') + + #Font file + self._newobj() + self._out('<>') + self._putstream(fontstream) + self._out('endobj') + del ttf + else: + #Allow for additional types + mtd='_put'+type.lower() + if(not method_exists(self,mtd)): + self.error('Unsupported font type: '+type) + self.mtd(font) + + def _putTTfontwidths(self, font, maxUni): + if font['unifilename']: + cw127fname = os.path.splitext(font['unifilename'])[0] + '.cw127.pkl' + else: + cw127fname = None + if cw127fname and os.path.exists(cw127fname): + fh = open(cw127fname, "rb"); + try: + font_dict = pickle.load(fh) + finally: + fh.close() + rangeid = font_dict['rangeid'] + range_ = font_dict['range'] + prevcid = font_dict['prevcid'] + prevwidth = font_dict['prevwidth'] + interval = font_dict['interval'] + range_interval = font_dict['range_interval'] + startcid = 128 + else: + rangeid = 0 + range_ = {} + range_interval = {} + prevcid = -2 + prevwidth = -1 + interval = False + startcid = 1 + cwlen = maxUni + 1 + + # for each character + for cid in range(startcid, cwlen): + if cid == 128 and cw127fname and not os.path.exists(cw127fname): + try: + fh = open(cw127fname, "wb") + font_dict = {} + font_dict['rangeid'] = rangeid + font_dict['prevcid'] = prevcid + font_dict['prevwidth'] = prevwidth + font_dict['interval'] = interval + font_dict['range_interval'] = range_interval + font_dict['range'] = range_ + pickle.dump(font_dict, fh) + fh.close() + except IOError: + if not exception().errno == errno.EACCES: + raise # Not a permission error. 
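+            # zero-width glyphs are skipped, a width of 65535 is treated as missing (0), and CIDs above 255 are written to /W only if they appear in the font subset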
+ if (font['cw'][cid] == 0): + continue + width = font['cw'][cid] + if (width == 65535): width = 0 + if (cid > 255 and (cid not in font['subset']) or not cid): # + continue + if ('dw' not in font or (font['dw'] and width != font['dw'])): + if (cid == (prevcid + 1)): + if (width == prevwidth): + if (width == range_[rangeid][0]): + range_.setdefault(rangeid, []).append(width) + else: + range_[rangeid].pop() + # new range + rangeid = prevcid + range_[rangeid] = [prevwidth, width] + interval = True + range_interval[rangeid] = True + else: + if (interval): + # new range + rangeid = cid + range_[rangeid] = [width] + else: + range_[rangeid].append(width) + interval = False + else: + rangeid = cid + range_[rangeid] = [width] + interval = False + prevcid = cid + prevwidth = width + prevk = -1 + nextk = -1 + prevint = False + for k, ws in sorted(range_.items()): + cws = len(ws) + if (k == nextk and not prevint and (not k in range_interval or cws < 3)): + if (k in range_interval): + del range_interval[k] + range_[prevk] = range_[prevk] + range_[k] + del range_[k] + else: + prevk = k + nextk = k + cws + if (k in range_interval): + prevint = (cws > 3) + del range_interval[k] + nextk -= 1 + else: + prevint = False + w = [] + for k, ws in sorted(range_.items()): + if (len(set(ws)) == 1): + w.append(' %s %s %s' % (k, k + len(ws) - 1, ws[0])) + else: + w.append(' %s [ %s ]\n' % (k, ' '.join([str(int(h)) for h in ws]))) ## + self._out('/W [%s]' % ''.join(w)) + + def _putimages(self): + filter='' + if self.compress: + filter='/Filter /FlateDecode ' + i = [(x[1]["i"],x[1]) for x in self.images.items()] + i.sort() + for idx,info in i: + self._putimage(info) + del info['data'] + if 'smask' in info: + del info['smask'] + + def _putimage(self, info): + if 'data' in info: + self._newobj() + info['n']=self.n + self._out('<>') + if('trns' in info and isinstance(info['trns'], list)): + trns='' + for i in range(0,len(info['trns'])): + trns+=str(info['trns'][i])+' '+str(info['trns'][i])+' ' + self._out('/Mask ['+trns+']') + if('smask' in info): + self._out('/SMask ' + str(self.n+1) + ' 0 R'); + self._out('/Length '+str(len(info['data']))+'>>') + self._putstream(info['data']) + self._out('endobj') + # Soft mask + if('smask' in info): + dp = '/Predictor 15 /Colors 1 /BitsPerComponent 8 /Columns ' + str(info['w']) + smask = {'w': info['w'], 'h': info['h'], 'cs': 'DeviceGray', 'bpc': 8, 'f': info['f'], 'dp': dp, 'data': info['smask']} + self._putimage(smask) + #Palette + if(info['cs']=='Indexed'): + self._newobj() + filter = self.compress and '/Filter /FlateDecode ' or '' + if self.compress: + pal=zlib.compress(info['pal']) + else: + pal=info['pal'] + self._out('<<'+filter+'/Length '+str(len(pal))+'>>') + self._putstream(pal) + self._out('endobj') + + def _putxobjectdict(self): + i = [(x["i"],x["n"]) for x in self.images.values()] + i.sort() + for idx,n in i: + self._out('/I'+str(idx)+' '+str(n)+' 0 R') + + def _putresourcedict(self): + self._out('/ProcSet [/PDF /Text /ImageB /ImageC /ImageI]') + self._out('/Font <<') + f = [(x["i"],x["n"]) for x in self.fonts.values()] + f.sort() + for idx,n in f: + self._out('/F'+str(idx)+' '+str(n)+' 0 R') + self._out('>>') + self._out('/XObject <<') + self._putxobjectdict() + self._out('>>') + + def _putresources(self): + self._putfonts() + self._putimages() + #Resource dictionary + self.offsets[2]=len(self.buffer) + self._out('2 0 obj') + self._out('<<') + self._putresourcedict() + self._out('>>') + self._out('endobj') + + def _putinfo(self): + self._out('/Producer 
'+self._textstring('PyFPDF '+FPDF_VERSION+' http://pyfpdf.googlecode.com/')) + if hasattr(self,'title'): + self._out('/Title '+self._textstring(self.title)) + if hasattr(self,'subject'): + self._out('/Subject '+self._textstring(self.subject)) + if hasattr(self,'author'): + self._out('/Author '+self._textstring(self.author)) + if hasattr (self,'keywords'): + self._out('/Keywords '+self._textstring(self.keywords)) + if hasattr(self,'creator'): + self._out('/Creator '+self._textstring(self.creator)) + self._out('/CreationDate '+self._textstring('D:'+datetime.now().strftime('%Y%m%d%H%M%S'))) + + def _putcatalog(self): + self._out('/Type /Catalog') + self._out('/Pages 1 0 R') + if(self.zoom_mode=='fullpage'): + self._out('/OpenAction [3 0 R /Fit]') + elif(self.zoom_mode=='fullwidth'): + self._out('/OpenAction [3 0 R /FitH null]') + elif(self.zoom_mode=='real'): + self._out('/OpenAction [3 0 R /XYZ null null 1]') + elif(not isinstance(self.zoom_mode,basestring)): + self._out(sprintf('/OpenAction [3 0 R /XYZ null null %s]',self.zoom_mode/100)) + if(self.layout_mode=='single'): + self._out('/PageLayout /SinglePage') + elif(self.layout_mode=='continuous'): + self._out('/PageLayout /OneColumn') + elif(self.layout_mode=='two'): + self._out('/PageLayout /TwoColumnLeft') + + def _putheader(self): + self._out('%PDF-'+self.pdf_version) + + def _puttrailer(self): + self._out('/Size '+str(self.n+1)) + self._out('/Root '+str(self.n)+' 0 R') + self._out('/Info '+str(self.n-1)+' 0 R') + + def _enddoc(self): + self._putheader() + self._putpages() + self._putresources() + #Info + self._newobj() + self._out('<<') + self._putinfo() + self._out('>>') + self._out('endobj') + #Catalog + self._newobj() + self._out('<<') + self._putcatalog() + self._out('>>') + self._out('endobj') + #Cross-ref + o=len(self.buffer) + self._out('xref') + self._out('0 '+(str(self.n+1))) + self._out('0000000000 65535 f ') + for i in range(1,self.n+1): + self._out(sprintf('%010d 00000 n ',self.offsets[i])) + #Trailer + self._out('trailer') + self._out('<<') + self._puttrailer() + self._out('>>') + self._out('startxref') + self._out(o) + self._out('%%EOF') + self.state=3 + + def _beginpage(self, orientation): + self.page+=1 + self.pages[self.page]='' + self.state=2 + self.x=self.l_margin + self.y=self.t_margin + self.font_family='' + #Page orientation + if(not orientation): + orientation=self.def_orientation + else: + orientation=orientation[0].upper() + if(orientation!=self.def_orientation): + self.orientation_changes[self.page]=1 + if(orientation!=self.cur_orientation): + #Change orientation + if(orientation=='P'): + self.w_pt=self.fw_pt + self.h_pt=self.fh_pt + self.w=self.fw + self.h=self.fh + else: + self.w_pt=self.fh_pt + self.h_pt=self.fw_pt + self.w=self.fh + self.h=self.fw + self.page_break_trigger=self.h-self.b_margin + self.cur_orientation=orientation + + def _endpage(self): + #End of page contents + self.state=1 + + def _newobj(self): + #Begin a new object + self.n+=1 + self.offsets[self.n]=len(self.buffer) + self._out(str(self.n)+' 0 obj') + + def _dounderline(self, x,y,txt): + #Underline text + up=self.current_font['up'] + ut=self.current_font['ut'] + w=self.get_string_width(txt)+self.ws*txt.count(' ') + return sprintf('%.2f %.2f %.2f %.2f re f',x*self.k,(self.h-(y-up/1000.0*self.font_size))*self.k,w*self.k,-ut/1000.0*self.font_size_pt) + + def _parsejpg(self, filename): + # Extract info from a JPEG file + try: + f = open(filename, 'rb') + while True: + markerHigh, markerLow = struct.unpack('BB', f.read(2)) + if markerHigh != 
0xFF or markerLow < 0xC0: + raise SyntaxError('No JPEG marker found') + elif markerLow == 0xDA: # SOS + raise SyntaxError('No JPEG SOF marker found') + elif (markerLow == 0xC8 or # JPG + (markerLow >= 0xD0 and markerLow <= 0xD9) or # RSTx + (markerLow >= 0xF0 and markerLow <= 0xFD)): # JPGx + pass + else: + dataSize, = struct.unpack('>H', f.read(2)) + data = f.read(dataSize - 2) if dataSize > 2 else '' + if ((markerLow >= 0xC0 and markerLow <= 0xC3) or # SOF0 - SOF3 + (markerLow >= 0xC5 and markerLow <= 0xC7) or # SOF4 - SOF7 + (markerLow >= 0xC9 and markerLow <= 0xCB) or # SOF9 - SOF11 + (markerLow >= 0xCD and markerLow <= 0xCF)): # SOF13 - SOF15 + bpc, height, width, layers = struct.unpack_from('>BHHB', data) + colspace = 'DeviceRGB' if layers == 3 else ('DeviceCMYK' if layers == 4 else 'DeviceGray') + break + except Exception: + self.error('Missing or incorrect image file: %s. error: %s' % (filename, str(exception()))) + + # Read whole file from the start + f.seek(0) + data = f.read() + f.close() + return {'w':width,'h':height,'cs':colspace,'bpc':bpc,'f':'DCTDecode','data':data} + + def _parsegif(self, filename): + # Extract info from a GIF file (via PNG conversion) + if Image is None: + self.error('PIL is required for GIF support') + try: + im = Image.open(filename) + except Exception: + self.error('Missing or incorrect image file: %s. error: %s' % (filename, str(exception()))) + else: + # Use temporary file + f = tempfile.NamedTemporaryFile(delete=False, suffix=".png") + tmp = f.name + f.close() + if "transparency" in im.info: + im.save(tmp, transparency = im.info['transparency']) + else: + im.save(tmp) + info = self._parsepng(tmp) + os.unlink(tmp) + return info + + def _parsepng(self, name): + #Extract info from a PNG file + if name.startswith("http://") or name.startswith("https://"): + f = urlopen(name) + else: + f=open(name,'rb') + if(not f): + self.error("Can't open image file: "+name) + #Check signature + magic = f.read(8).decode("latin1") + signature = '\x89'+'PNG'+'\r'+'\n'+'\x1a'+'\n' + if not PY3K: signature = signature.decode("latin1") + if(magic!=signature): + self.error('Not a PNG file: '+name) + #Read header chunk + f.read(4) + chunk = f.read(4).decode("latin1") + if(chunk!='IHDR'): + self.error('Incorrect PNG file: '+name) + w=self._freadint(f) + h=self._freadint(f) + bpc=ord(f.read(1)) + if(bpc>8): + self.error('16-bit depth not supported: '+name) + ct=ord(f.read(1)) + if(ct==0 or ct==4): + colspace='DeviceGray' + elif(ct==2 or ct==6): + colspace='DeviceRGB' + elif(ct==3): + colspace='Indexed' + else: + self.error('Unknown color type: '+name) + if(ord(f.read(1))!=0): + self.error('Unknown compression method: '+name) + if(ord(f.read(1))!=0): + self.error('Unknown filter method: '+name) + if(ord(f.read(1))!=0): + self.error('Interlacing not supported: '+name) + f.read(4) + dp='/Predictor 15 /Colors ' + if colspace == 'DeviceRGB': + dp+='3' + else: + dp+='1' + dp+=' /BitsPerComponent '+str(bpc)+' /Columns '+str(w)+'' + #Scan chunks looking for palette, transparency and image data + pal='' + trns='' + data=bytes() if PY3K else str() + n=1 + while n != None: + n=self._freadint(f) + type=f.read(4).decode("latin1") + if(type=='PLTE'): + #Read palette + pal=f.read(n) + f.read(4) + elif(type=='tRNS'): + #Read transparency info + t=f.read(n) + if(ct==0): + trns=[ord(substr(t,1,1)),] + elif(ct==2): + trns=[ord(substr(t,1,1)),ord(substr(t,3,1)),ord(substr(t,5,1))] + else: + pos=t.find('\x00'.encode("latin1")) + if(pos!=-1): + trns=[pos,] + f.read(4) + elif(type=='IDAT'): + #Read 
image data block + data+=f.read(n) + f.read(4) + elif(type=='IEND'): + break + else: + f.read(n+4) + if(colspace=='Indexed' and not pal): + self.error('Missing palette in '+name) + f.close() + info = {'w':w,'h':h,'cs':colspace,'bpc':bpc,'f':'FlateDecode','dp':dp,'pal':pal,'trns':trns,} + if(ct>=4): + # Extract alpha channel + data = zlib.decompress(data) + color = b('') + alpha = b('') + if(ct==4): + # Gray image + length = 2*w + for i in range(h): + pos = (1+length)*i + color += b(data[pos]) + alpha += b(data[pos]) + line = substr(data, pos+1, length) + re_c = re.compile('(.).'.encode("ascii"), flags=re.DOTALL) + re_a = re.compile('.(.)'.encode("ascii"), flags=re.DOTALL) + color += re_c.sub(lambda m: m.group(1), line) + alpha += re_a.sub(lambda m: m.group(1), line) + else: + # RGB image + length = 4*w + for i in range(h): + pos = (1+length)*i + color += b(data[pos]) + alpha += b(data[pos]) + line = substr(data, pos+1, length) + re_c = re.compile('(...).'.encode("ascii"), flags=re.DOTALL) + re_a = re.compile('...(.)'.encode("ascii"), flags=re.DOTALL) + color += re_c.sub(lambda m: m.group(1), line) + alpha += re_a.sub(lambda m: m.group(1), line) + del data + data = zlib.compress(color) + info['smask'] = zlib.compress(alpha) + if (self.pdf_version < '1.4'): + self.pdf_version = '1.4' + info['data'] = data + return info + + def _freadint(self, f): + #Read a 4-byte integer from file + try: + return struct.unpack('>I', f.read(4))[0] + except: + return None + + def _textstring(self, s): + #Format a text string + return '('+self._escape(s)+')' + + def _escape(self, s): + #Add \ before \, ( and ) + return s.replace('\\','\\\\').replace(')','\\)').replace('(','\\(').replace('\r','\\r') + + def _putstream(self, s): + self._out('stream') + self._out(s) + self._out('endstream') + + def _out(self, s): + #Add a line to the document + if PY3K and isinstance(s, bytes): + # manage binary data as latin1 until PEP461-like function is implemented + s = s.decode("latin1") + elif not PY3K and isinstance(s, unicode): + s = s.encode("latin1") # default encoding (font name and similar) + elif not isinstance(s, basestring): + s = str(s) + if(self.state==2): + self.pages[self.page]+=s+"\n" + else: + self.buffer+=s+"\n" + + @check_page + def interleaved2of5(self, txt, x, y, w=1.0, h=10.0): + "Barcode I2of5 (numeric), adds a 0 if odd lenght" + narrow = w / 3.0 + wide = w + + # wide/narrow codes for the digits + bar_char={'0': 'nnwwn', '1': 'wnnnw', '2': 'nwnnw', '3': 'wwnnn', + '4': 'nnwnw', '5': 'wnwnn', '6': 'nwwnn', '7': 'nnnww', + '8': 'wnnwn', '9': 'nwnwn', 'A': 'nn', 'Z': 'wn'} + + self.set_fill_color(0) + code = txt + # add leading zero if code-length is odd + if len(code) % 2 != 0: + code = '0' + code + + # add start and stop codes + code = 'AA' + code.lower() + 'ZA' + + for i in range(0, len(code), 2): + # choose next pair of digits + char_bar = code[i] + char_space = code[i+1] + # check whether it is a valid digit + if not char_bar in bar_char.keys(): + raise RuntimeError ('Char "%s" invalid for I25: ' % char_bar) + if not char_space in bar_char.keys(): + raise RuntimeError ('Char "%s" invalid for I25: ' % char_space) + + # create a wide/narrow-seq (first digit=bars, second digit=spaces) + seq = '' + for s in range(0, len(bar_char[char_bar])): + seq += bar_char[char_bar][s] + bar_char[char_space][s] + + for bar in range(0, len(seq)): + # set line_width depending on value + if seq[bar] == 'n': + line_width = narrow + else: + line_width = wide + + # draw every second value, the other is represented by space + 
if bar % 2 == 0: + self.rect(x, y, line_width, h, 'F') + + x += line_width + + + @check_page + def code39(self, txt, x, y, w=1.5, h=5.0): + """Barcode 3of9""" + dim = {'w': w, 'n': w/3.} + chars = { + '0': 'nnnwwnwnn', '1': 'wnnwnnnnw', '2': 'nnwwnnnnw', + '3': 'wnwwnnnnn', '4': 'nnnwwnnnw', '5': 'wnnwwnnnn', + '6': 'nnwwwnnnn', '7': 'nnnwnnwnw', '8': 'wnnwnnwnn', + '9': 'nnwwnnwnn', 'A': 'wnnnnwnnw', 'B': 'nnwnnwnnw', + 'C': 'wnwnnwnnn', 'D': 'nnnnwwnnw', 'E': 'wnnnwwnnn', + 'F': 'nnwnwwnnn', 'G': 'nnnnnwwnw', 'H': 'wnnnnwwnn', + 'I': 'nnwnnwwnn', 'J': 'nnnnwwwnn', 'K': 'wnnnnnnww', + 'L': 'nnwnnnnww', 'M': 'wnwnnnnwn', 'N': 'nnnnwnnww', + 'O': 'wnnnwnnwn', 'P': 'nnwnwnnwn', 'Q': 'nnnnnnwww', + 'R': 'wnnnnnwwn', 'S': 'nnwnnnwwn', 'T': 'nnnnwnwwn', + 'U': 'wwnnnnnnw', 'V': 'nwwnnnnnw', 'W': 'wwwnnnnnn', + 'X': 'nwnnwnnnw', 'Y': 'wwnnwnnnn', 'Z': 'nwwnwnnnn', + '-': 'nwnnnnwnw', '.': 'wwnnnnwnn', ' ': 'nwwnnnwnn', + '*': 'nwnnwnwnn', '$': 'nwnwnwnnn', '/': 'nwnwnnnwn', + '+': 'nwnnnwnwn', '%': 'nnnwnwnwn', + } + self.set_fill_color(0) + for c in txt.upper(): + if c not in chars: + raise RuntimeError('Invalid char "%s" for Code39' % c) + for i, d in enumerate(chars[c]): + if i % 2 == 0: + self.rect(x, y, dim[d], h, 'F') + x += dim[d] + x += dim['n'] + + diff --git a/lib/python3.12/site-packages/fpdf/html.py b/lib/python3.12/site-packages/fpdf/html.py new file mode 100644 index 0000000..b75b74d --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/html.py @@ -0,0 +1,402 @@ +# -*- coding: latin-1 -*- + +"HTML Renderer for FPDF.py" + +__author__ = "Mariano Reingart " +__copyright__ = "Copyright (C) 2010 Mariano Reingart" +__license__ = "LGPL 3.0" + +# Inspired by tuto5.py and several examples from fpdf.org, html2fpdf, etc. + +from .fpdf import FPDF +from .py3k import PY3K, basestring, unicode, HTMLParser + +DEBUG = False + +def px2mm(px): + return int(px)*25.4/72.0 + +def hex2dec(color = "#000000"): + if color: + r = int(color[1:3], 16) + g = int(color[3:5], 16) + b = int(color[5:7], 16) + return r, g, b + +class HTML2FPDF(HTMLParser): + "Render basic HTML to FPDF" + + def __init__(self, pdf, image_map=None): + HTMLParser.__init__(self) + self.style = {} + self.pre = False + self.href = '' + self.align = '' + self.page_links = {} + self.font = None + self.font_stack = [] + self.pdf = pdf + self.image_map = image_map or (lambda src: src) + self.r = self.g = self.b = 0 + self.indent = 0 + self.bullet = [] + self.set_font("times", 12) + self.font_face = "times" # initialize font + self.color = 0 #initialize font color + self.table = None # table attributes + self.table_col_width = None # column (header) widths + self.table_col_index = None # current column index + self.td = None # cell attributes + self.th = False # header enabled + self.tr = None + self.theader = None # table header cells + self.tfooter = None # table footer cells + self.thead = None + self.tfoot = None + self.theader_out = self.tfooter_out = False + self.hsize = dict(h1=2, h2=1.5, h3=1.17, h4=1, h5=0.83, h6=0.67) + + def width2mm(self, length): + if length[-1]=='%': + total = self.pdf.w - self.pdf.r_margin - self.pdf.l_margin + if self.table['width'][-1]=='%': + total *= int(self.table['width'][:-1])/100.0 + return int(length[:-1]) * total / 101.0 + else: + return int(length) / 6.0 + + def handle_data(self, txt): + if self.td is not None: # drawing a table? 
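# Usage sketch for the two barcode helpers defined in fpdf.py above
# (interleaved2of5 and code39): a minimal example, assuming the standard
# FPDF() / add_page() / output() calls; the coordinates and output filename
# are illustrative only.
from fpdf import FPDF

pdf = FPDF()
pdf.add_page()
# I2of5 takes digits; a leading 0 is prepended when the length is odd
pdf.interleaved2of5("0123456789", x=10, y=20, w=1.0, h=10.0)
# Code 39 accepts the characters in the chars table, with * used as start/stop
pdf.code39("*PYFPDF*", x=10, y=40, w=1.5, h=5.0)
pdf.output("barcodes.pdf", "F")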
+ if 'width' not in self.td and 'colspan' not in self.td: + try: + l = [self.table_col_width[self.table_col_index]] + except IndexError: + raise RuntimeError("Table column/cell width not specified, unable to continue") + elif 'colspan' in self.td: + i = self.table_col_index + colspan = int(self.td['colspan']) + l = self.table_col_width[i:i+colspan] + else: + l = [self.td.get('width','240')] + w = sum([self.width2mm(lenght) for lenght in l]) + h = int(self.td.get('height', 0)) / 4 or self.h*1.30 + self.table_h = h + border = int(self.table.get('border', 0)) + if not self.th: + align = self.td.get('align', 'L')[0].upper() + border = border and 'LR' + else: + self.set_style('B',True) + border = border or 'B' + align = self.td.get('align', 'C')[0].upper() + bgcolor = hex2dec(self.td.get('bgcolor', self.tr.get('bgcolor', ''))) + # parsing table header/footer (drawn later): + if self.thead is not None: + self.theader.append(((w,h,txt,border,0,align), bgcolor)) + if self.tfoot is not None: + self.tfooter.append(((w,h,txt,border,0,align), bgcolor)) + # check if reached end of page, add table footer and header: + height = h + (self.tfooter and self.tfooter[0][0][1] or 0) + if self.pdf.y+height>self.pdf.page_break_trigger and not self.th: + self.output_table_footer() + self.pdf.add_page() + self.theader_out = self.tfooter_out = False + if self.tfoot is None and self.thead is None: + if not self.theader_out: + self.output_table_header() + self.box_shadow(w, h, bgcolor) + if DEBUG: print("td cell", self.pdf.x, w, txt, "*") + self.pdf.cell(w,h,txt,border,0,align) + elif self.table is not None: + # ignore anything else than td inside a table + pass + elif self.align: + if DEBUG: print("cell", txt, "*") + self.pdf.cell(0,self.h,txt,0,1,self.align[0].upper(), self.href) + else: + txt = txt.replace("\n"," ") + if self.href: + self.put_link(self.href,txt) + else: + if DEBUG: print("write", txt, "*") + self.pdf.write(self.h,txt) + + def box_shadow(self, w, h, bgcolor): + if DEBUG: print("box_shadow", w, h, bgcolor) + if bgcolor: + fill_color = self.pdf.fill_color + self.pdf.set_fill_color(*bgcolor) + self.pdf.rect(self.pdf.x, self.pdf.y, w, h, 'F') + self.pdf.fill_color = fill_color + + def output_table_header(self): + if self.theader: + b = self.b + x = self.pdf.x + self.pdf.set_x(self.table_offset) + self.set_style('B',True) + for cell, bgcolor in self.theader: + self.box_shadow(cell[0], cell[1], bgcolor) + self.pdf.cell(*cell) + self.set_style('B',b) + self.pdf.ln(self.theader[0][0][1]) + self.pdf.set_x(self.table_offset) + #self.pdf.set_x(x) + self.theader_out = True + + def output_table_footer(self): + if self.tfooter: + x = self.pdf.x + self.pdf.set_x(self.table_offset) + #TODO: self.output_table_sep() + for cell, bgcolor in self.tfooter: + self.box_shadow(cell[0], cell[1], bgcolor) + self.pdf.cell(*cell) + self.pdf.ln(self.tfooter[0][0][1]) + self.pdf.set_x(x) + if int(self.table.get('border', 0)): + self.output_table_sep() + self.tfooter_out = True + + def output_table_sep(self): + self.pdf.set_x(self.table_offset) + x1 = self.pdf.x + y1 = self.pdf.y + w = sum([self.width2mm(lenght) for lenght in self.table_col_width]) + self.pdf.line(x1,y1,x1+w,y1) + + + def handle_starttag(self, tag, attrs): + attrs = dict(attrs) + if DEBUG: print("STARTTAG", tag, attrs) + if tag=='b' or tag=='i' or tag=='u': + self.set_style(tag,1) + if tag=='a': + self.href=attrs['href'] + if tag=='br': + self.pdf.ln(5) + if tag=='p': + self.pdf.ln(5) + if attrs: + if attrs: self.align = attrs.get('align') + if tag in self.hsize: 
+ k = self.hsize[tag] + self.pdf.ln(5*k) + self.pdf.set_text_color(150,0,0) + self.pdf.set_font_size(12 * k) + if attrs: self.align = attrs.get('align') + if tag=='hr': + self.put_line() + if tag=='pre': + self.pdf.set_font('Courier','',11) + self.pdf.set_font_size(11) + self.set_style('B',False) + self.set_style('I',False) + self.pre = True + if tag=='blockquote': + self.set_text_color(100,0,45) + self.pdf.ln(3) + if tag=='ul': + self.indent+=1 + self.bullet.append('\x95') + if tag=='ol': + self.indent+=1 + self.bullet.append(0) + if tag=='li': + self.pdf.ln(self.h+2) + self.pdf.set_text_color(190,0,0) + bullet = self.bullet[self.indent-1] + if not isinstance(bullet, basestring): + bullet += 1 + self.bullet[self.indent-1] = bullet + bullet = "%s. " % bullet + self.pdf.write(self.h,'%s%s ' % (' '*5*self.indent, bullet)) + self.set_text_color() + if tag=='font': + # save previous font state: + self.font_stack.append((self.font_face, self.font_size, self.color)) + if 'color' in attrs: + self.color = hex2dec(attrs['color']) + self.set_text_color(*color) + self.color = color + if 'face' in attrs: + face = attrs.get('face').lower() + try: + self.pdf.set_font(face) + self.font_face = face + except RuntimeError: + pass # font not found, ignore + if 'size' in attrs: + size = int(attrs.get('size')) + self.pdf.set_font(self.font_face, size=int(size)) + self.font_size = size + if tag=='table': + self.table = dict([(k.lower(), v) for k,v in attrs.items()]) + if not 'width' in self.table: + self.table['width'] = '100%' + if self.table['width'][-1]=='%': + w = self.pdf.w - self.pdf.r_margin - self.pdf.l_margin + w *= int(self.table['width'][:-1])/100.0 + self.table_offset = (self.pdf.w-w)/2.0 + self.table_col_width = [] + self.theader_out = self.tfooter_out = False + self.theader = [] + self.tfooter = [] + self.thead = None + self.tfoot = None + self.table_h = 0 + self.pdf.ln() + if tag=='tr': + self.tr = dict([(k.lower(), v) for k,v in attrs.items()]) + self.table_col_index = 0 + self.pdf.set_x(self.table_offset) + if tag=='td': + self.td = dict([(k.lower(), v) for k,v in attrs.items()]) + if tag=='th': + self.td = dict([(k.lower(), v) for k,v in attrs.items()]) + self.th = True + if 'width' in self.td: + self.table_col_width.append(self.td['width']) + if tag=='thead': + self.thead = {} + if tag=='tfoot': + self.tfoot = {} + if tag=='img': + if 'src' in attrs: + x = self.pdf.get_x() + y = self.pdf.get_y() + w = px2mm(attrs.get('width', 0)) + h = px2mm(attrs.get('height',0)) + if self.align and self.align[0].upper() == 'C': + x = (self.pdf.w-x)/2.0 - w/2.0 + self.pdf.image(self.image_map(attrs['src']), + x, y, w, h, link=self.href) + self.pdf.set_x(x+w) + self.pdf.set_y(y+h) + if tag=='b' or tag=='i' or tag=='u': + self.set_style(tag, True) + if tag=='center': + self.align = 'Center' + + def handle_endtag(self, tag): + #Closing tag + if DEBUG: print("ENDTAG", tag) + if tag=='h1' or tag=='h2' or tag=='h3' or tag=='h4': + self.pdf.ln(6) + self.set_font() + self.set_style() + self.align = None + if tag=='pre': + self.pdf.set_font(self.font or 'Times','',12) + self.pdf.set_font_size(12) + self.pre=False + if tag=='blockquote': + self.set_text_color(0,0,0) + self.pdf.ln(3) + if tag=='strong': + tag='b' + if tag=='em': + tag='i' + if tag=='b' or tag=='i' or tag=='u': + self.set_style(tag, False) + if tag=='a': + self.href='' + if tag=='p': + self.align='' + if tag in ('ul', 'ol'): + self.indent-=1 + self.bullet.pop() + if tag=='table': + if not self.tfooter_out: + self.output_table_footer() + self.table = 
None + self.th = False + self.theader = None + self.tfooter = None + self.pdf.ln() + if tag=='thead': + self.thead = None + if tag=='tfoot': + self.tfoot = None + if tag=='tbody': + # draw a line separator between table bodies + self.pdf.set_x(self.table_offset) + self.output_table_sep() + if tag=='tr': + h = self.table_h + if self.tfoot is None: + self.pdf.ln(h) + self.tr = None + if tag=='td' or tag=='th': + if self.th: + if DEBUG: print("revert style") + self.set_style('B', False) # revert style + self.table_col_index += int(self.td.get('colspan','1')) + self.td = None + self.th = False + if tag=='font': + # recover last font state + face, size, color = self.font_stack.pop() + if face: + self.pdf.set_text_color(0,0,0) + self.color = None + self.set_font(face, size) + self.font = None + if tag=='center': + self.align = None + + def set_font(self, face=None, size=None): + if face: + self.font_face = face + if size: + self.font_size = size + self.h = size / 72.0*25.4 + if DEBUG: print("H", self.h) + self.pdf.set_font(self.font_face or 'times','',12) + self.pdf.set_font_size(self.font_size or 12) + self.set_style('u', False) + self.set_style('b', False) + self.set_style('i', False) + self.set_text_color() + + def set_style(self, tag=None, enable=None): + #Modify style and select corresponding font + if tag: + t = self.style.get(tag.lower()) + self.style[tag.lower()] = enable + style='' + for s in ('b','i','u'): + if self.style.get(s): + style+=s + if DEBUG: print("SET_FONT_STYLE", style) + self.pdf.set_font('',style) + + def set_text_color(self, r=None, g=0, b=0): + if r is None: + self.pdf.set_text_color(self.r,self.g,self.b) + else: + self.pdf.set_text_color(r, g, b) + self.r = r + self.g = g + self.b = b + + def put_link(self, url, txt): + #Put a hyperlink + self.set_text_color(0,0,255) + self.set_style('u', True) + self.pdf.write(5,txt,url) + self.set_style('u', False) + self.set_text_color(0) + + def put_line(self): + self.pdf.ln(2) + self.pdf.line(self.pdf.get_x(),self.pdf.get_y(),self.pdf.get_x()+187,self.pdf.get_y()) + self.pdf.ln(3) + +class HTMLMixin(object): + def write_html(self, text, image_map=None): + "Parse HTML and convert it to PDF" + h2p = HTML2FPDF(self, image_map) + text = h2p.unescape(text) # To deal with HTML entities + h2p.feed(text) + diff --git a/lib/python3.12/site-packages/fpdf/php.py b/lib/python3.12/site-packages/fpdf/php.py new file mode 100644 index 0000000..3371789 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/php.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# -*- coding: latin-1 -*- + +from .py3k import PY3K, basestring, unicode + +# fpdf php helpers: + +def substr(s, start, length=-1): + if length < 0: + length=len(s)-start + return s[start:start+length] + +def sprintf(fmt, *args): return fmt % args + +def print_r(array): + if not isinstance(array, dict): + array = dict([(k, k) for k in array]) + for k, v in array.items(): + print("[%s] => %s " % (k, v)) + +def UTF8ToUTF16BE(instr, setbom=True): + "Converts UTF-8 strings to UTF16-BE." 
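# Usage sketch for the HTMLMixin defined in html.py above: a minimal example,
# assuming the mixin is combined with FPDF via multiple inheritance; the
# subclass name, HTML snippet and output filename are illustrative only.
from fpdf.fpdf import FPDF
from fpdf.html import HTMLMixin

class MyPDF(FPDF, HTMLMixin):
    pass

pdf = MyPDF()
pdf.add_page()
pdf.write_html("<h1>Title</h1><p align='center'>Hello <b>world</b></p>")
pdf.output("html_demo.pdf", "F")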
+ outstr = "".encode() + if (setbom): + outstr += "\xFE\xFF".encode("latin1") + if not isinstance(instr, unicode): + instr = instr.decode('UTF-8') + outstr += instr.encode('UTF-16BE') + # convert bytes back to fake unicode string until PEP461-like is implemented + if PY3K: + outstr = outstr.decode("latin1") + return outstr + +def UTF8StringToArray(instr): + "Converts UTF-8 strings to codepoints array" + return [ord(c) for c in instr] + +# ttfints php helpers: + +def die(msg): + raise RuntimeError(msg) + +def str_repeat(s, count): + return s * count + +def str_pad(s, pad_length=0, pad_char= " ", pad_type= +1 ): + if pad_type<0: # pad left + return s.rjust(pad_length, pad_char) + elif pad_type>0: # pad right + return s.ljust(pad_length, pad_char) + else: # pad both + return s.center(pad_length, pad_char) + +strlen = count = lambda s: len(s) diff --git a/lib/python3.12/site-packages/fpdf/py3k.py b/lib/python3.12/site-packages/fpdf/py3k.py new file mode 100644 index 0000000..eb97658 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/py3k.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"Special module to handle differences between Python 2 and 3 versions" + +import sys + +PY3K = sys.version_info >= (3, 0) + +try: + import cPickle as pickle +except ImportError: + import pickle + +try: + from urllib import urlopen +except ImportError: + from urllib.request import urlopen + +try: + from hashlib import md5 +except ImportError: + try: + from md5 import md5 + except ImportError: + md5 = None +def hashpath(fn): + h = md5() + if PY3K: + h.update(fn.encode("UTF-8")) + else: + h.update(fn) + return h.hexdigest() + +# Check if PIL is available (tries importing both pypi version and corrected or manually installed versions). +# Necessary for JPEG and GIF support. 
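# Worked examples for the PHP-style helpers defined in php.py above (a sketch;
# the expected values follow directly from the definitions shown there).
from fpdf.php import substr, sprintf, str_pad

assert substr("barcode", 3) == "code"             # negative length means "to the end of the string"
assert str_pad("7", 3, "0", -1) == "007"          # pad_type < 0 pads on the left
assert sprintf("%010d 00000 n ", 42) == "0000000042 00000 n "  # xref-entry format used in fpdf.py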
+# TODO: Pillow support +try: + from PIL import Image +except ImportError: + try: + import Image + except ImportError: + Image = None + +try: + from HTMLParser import HTMLParser +except ImportError: + from html.parser import HTMLParser + +if PY3K: + basestring = str + unicode = str + ord = lambda x: x +else: + basestring = basestring + unicode = unicode + ord = ord + +# shortcut to bytes conversion (b prefix) +def b(s): + if isinstance(s, basestring): + return s.encode("latin1") + elif isinstance(s, int): + if PY3K: + return bytes([s]) # http://bugs.python.org/issue4588 + else: + return chr(s) + +def exception(): + "Return the current the exception instance currently being handled" + # this is needed to support Python 2.5 that lacks "as" syntax + return sys.exc_info()[1] + + diff --git a/lib/python3.12/site-packages/fpdf/template.py b/lib/python3.12/site-packages/fpdf/template.py new file mode 100644 index 0000000..154b391 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/template.py @@ -0,0 +1,226 @@ +# -*- coding: iso-8859-1 -*- + +"PDF Template Helper for FPDF.py" + +__author__ = "Mariano Reingart " +__copyright__ = "Copyright (C) 2010 Mariano Reingart" +__license__ = "LGPL 3.0" + +import sys,os,csv +from .fpdf import FPDF +from .py3k import PY3K, basestring, unicode + +def rgb(col): + return (col // 65536), (col // 256 % 256), (col% 256) + +class Template: + def __init__(self, infile=None, elements=None, format='A4', orientation='portrait', + title='', author='', subject='', creator='', keywords=''): + if elements: + self.load_elements(elements) + self.handlers = {'T': self.text, 'L': self.line, 'I': self.image, + 'B': self.rect, 'BC': self.barcode, 'W': self.write, } + self.texts = {} + pdf = self.pdf = FPDF(format=format,orientation=orientation, unit="mm") + pdf.set_title(title) + pdf.set_author(author) + pdf.set_creator(creator) + pdf.set_subject(subject) + pdf.set_keywords(keywords) + + def load_elements(self, elements): + "Initialize the internal element structures" + self.pg_no = 0 + self.elements = elements + self.keys = [v['name'].lower() for v in self.elements] + + def parse_csv(self, infile, delimiter=",", decimal_sep="."): + "Parse template format csv file and create elements dict" + keys = ('name','type','x1','y1','x2','y2','font','size', + 'bold','italic','underline','foreground','background', + 'align','text','priority', 'multiline') + self.elements = [] + self.pg_no = 0 + if not PY3K: + f = open(infile, 'rb') + else: + f = open(infile) + for row in csv.reader(f, delimiter=delimiter): + kargs = {} + for i,v in enumerate(row): + if not v.startswith("'") and decimal_sep!=".": + v = v.replace(decimal_sep,".") + else: + v = v + if v=='': + v = None + else: + v = eval(v.strip()) + kargs[keys[i]] = v + self.elements.append(kargs) + self.keys = [v['name'].lower() for v in self.elements] + + def add_page(self): + self.pg_no += 1 + self.texts[self.pg_no] = {} + + def __setitem__(self, name, value): + if name.lower() in self.keys: + if not PY3K and isinstance(value, unicode): + value = value.encode("latin1","ignore") + elif value is None: + value = "" + else: + value = str(value) + self.texts[self.pg_no][name.lower()] = value + + # setitem shortcut (may be further extended) + set = __setitem__ + + def has_key(self, name): + return name.lower() in self.keys + + def __getitem__(self, name): + if name in self.keys: + key = name.lower() + if key in self.texts: + # text for this page: + return self.texts[self.pg_no][key] + else: + # find first element for default text: + elements = 
[element for element in self.elements + if element['name'].lower() == key] + if elements: + return elements[0]['text'] + + def split_multicell(self, text, element_name): + "Divide (\n) a string using a given element width" + pdf = self.pdf + element = [element for element in self.elements + if element['name'].lower() == element_name.lower()][0] + style = "" + if element['bold']: style += "B" + if element['italic']: style += "I" + if element['underline']: style += "U" + pdf.set_font(element['font'],style,element['size']) + align = {'L':'L','R':'R','I':'L','D':'R','C':'C','':''}.get(element['align']) # D/I in spanish + if isinstance(text, unicode) and not PY3K: + text = text.encode("latin1","ignore") + else: + text = str(text) + return pdf.multi_cell(w=element['x2']-element['x1'], + h=element['y2']-element['y1'], + txt=text,align=align,split_only=True) + + def render(self, outfile, dest="F"): + pdf = self.pdf + for pg in range(1, self.pg_no+1): + pdf.add_page() + pdf.set_font('Arial','B',16) + pdf.set_auto_page_break(False,margin=0) + + for element in sorted(self.elements,key=lambda x: x['priority']): + #print "dib",element['type'], element['name'], element['x1'], element['y1'], element['x2'], element['y2'] + element = element.copy() + element['text'] = self.texts[pg].get(element['name'].lower(), element['text']) + if 'rotate' in element: + pdf.rotate(element['rotate'], element['x1'], element['y1']) + self.handlers[element['type'].upper()](pdf, **element) + if 'rotate' in element: + pdf.rotate(0) + + if dest: + return pdf.output(outfile, dest) + + def text(self, pdf, x1=0, y1=0, x2=0, y2=0, text='', font="arial", size=10, + bold=False, italic=False, underline=False, align="", + foreground=0, backgroud=65535, multiline=None, + *args, **kwargs): + if text: + if pdf.text_color!=rgb(foreground): + pdf.set_text_color(*rgb(foreground)) + if pdf.fill_color!=rgb(backgroud): + pdf.set_fill_color(*rgb(backgroud)) + + font = font.strip().lower() + if font == 'arial black': + font = 'arial' + style = "" + for tag in 'B', 'I', 'U': + if (text.startswith("<%s>" % tag) and text.endswith("" %tag)): + text = text[3:-4] + style += tag + if bold: style += "B" + if italic: style += "I" + if underline: style += "U" + align = {'L':'L','R':'R','I':'L','D':'R','C':'C','':''}.get(align) # D/I in spanish + pdf.set_font(font,style,size) + ##m_k = 72 / 2.54 + ##h = (size/m_k) + pdf.set_xy(x1,y1) + if multiline is None: + # multiline==None: write without wrapping/trimming (default) + pdf.cell(w=x2-x1,h=y2-y1,txt=text,border=0,ln=0,align=align) + elif multiline: + # multiline==True: automatic word - warp + pdf.multi_cell(w=x2-x1,h=y2-y1,txt=text,border=0,align=align) + else: + # multiline==False: trim to fit exactly the space defined + text = pdf.multi_cell(w=x2-x1, h=y2-y1, + txt=text, align=align, split_only=True)[0] + print("trimming: *%s*" % text) + pdf.cell(w=x2-x1,h=y2-y1,txt=text,border=0,ln=0,align=align) + + #pdf.Text(x=x1,y=y1,txt=text) + + def line(self, pdf, x1=0, y1=0, x2=0, y2=0, size=0, foreground=0, *args, **kwargs): + if pdf.draw_color!=rgb(foreground): + #print "SetDrawColor", hex(foreground) + pdf.set_draw_color(*rgb(foreground)) + #print "SetLineWidth", size + pdf.set_line_width(size) + pdf.line(x1, y1, x2, y2) + + def rect(self, pdf, x1=0, y1=0, x2=0, y2=0, size=0, foreground=0, backgroud=65535, *args, **kwargs): + if pdf.draw_color!=rgb(foreground): + pdf.set_draw_color(*rgb(foreground)) + if pdf.fill_color!=rgb(backgroud): + pdf.set_fill_color(*rgb(backgroud)) + pdf.set_line_width(size) + 
pdf.rect(x1, y1, x2-x1, y2-y1) + + def image(self, pdf, x1=0, y1=0, x2=0, y2=0, text='', *args,**kwargs): + if text: + pdf.image(text,x1,y1,w=x2-x1,h=y2-y1,type='',link='') + + def barcode(self, pdf, x1=0, y1=0, x2=0, y2=0, text='', font="arial", size=1, + foreground=0, *args, **kwargs): + if pdf.draw_color!=rgb(foreground): + pdf.set_draw_color(*rgb(foreground)) + font = font.lower().strip() + if font == 'interleaved 2of5 nt': + pdf.interleaved2of5(text,x1,y1,w=size,h=y2-y1) + + # Added by Derek Schwalenberg Schwalenberg1013@gmail.com to allow (url) links in templates (using write method) 2014-02-22 + def write(self, pdf, x1=0, y1=0, x2=0, y2=0, text='', font="arial", size=1, + bold=False, italic=False, underline=False, align="", link='http://example.com', + foreground=0, *args, **kwargs): + if pdf.text_color!=rgb(foreground): + pdf.set_text_color(*rgb(foreground)) + font = font.strip().lower() + if font == 'arial black': + font = 'arial' + style = "" + for tag in 'B', 'I', 'U': + if (text.startswith("<%s>" % tag) and text.endswith("" %tag)): + text = text[3:-4] + style += tag + if bold: style += "B" + if italic: style += "I" + if underline: style += "U" + align = {'L':'L','R':'R','I':'L','D':'R','C':'C','':''}.get(align) # D/I in spanish + pdf.set_font(font,style,size) + ##m_k = 72 / 2.54 + ##h = (size/m_k) + pdf.set_xy(x1,y1) + pdf.write(5,text,link) diff --git a/lib/python3.12/site-packages/fpdf/ttfonts.py b/lib/python3.12/site-packages/fpdf/ttfonts.py new file mode 100644 index 0000000..5e664a1 --- /dev/null +++ b/lib/python3.12/site-packages/fpdf/ttfonts.py @@ -0,0 +1,1067 @@ +#****************************************************************************** +# TTFontFile class +# +# This class is based on The ReportLab Open Source PDF library +# written in Python - http://www.reportlab.com/software/opensource/ +# together with ideas from the OpenOffice source code and others. +# +# Version: 1.04 +# Date: 2011-09-18 +# Author: Ian Back +# License: LGPL +# Copyright (c) Ian Back, 2010 +# Ported to Python 2.7 by Mariano Reingart (reingart@gmail.com) on 2012 +# This header must be retained in any redistribution or +# modification of the file. +# +#****************************************************************************** + +from struct import pack, unpack, unpack_from +import re +import warnings +from .php import die, substr, str_repeat, str_pad, strlen, count +from .py3k import b, ord + + +# Define the value used in the "head" table of a created TTF file +# 0x74727565 "true" for Mac +# 0x00010000 for Windows +# Either seems to work for a font embedded in a PDF file +# when read by Adobe Reader on a Windows PC(!) 
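# Usage sketch for the Template class defined in template.py above: a minimal
# example, assuming dict-based elements as accepted by load_elements(); the
# element values and the output filename are illustrative only.
from fpdf.template import Template

elements = [
    {"name": "box", "type": "B", "x1": 15.0, "y1": 15.0, "x2": 195.0, "y2": 35.0,
     "size": 0.3, "text": "", "priority": 0},
    {"name": "title", "type": "T", "x1": 20.0, "y1": 20.0, "x2": 190.0, "y2": 30.0,
     "font": "Arial", "size": 14, "bold": True, "align": "C",
     "text": "Default title", "priority": 1},
]
t = Template(elements=elements, title="Demo")
t.add_page()
t["title"] = "Invoice 0001"      # per-page override of the element's default text
t.render("template_demo.pdf")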
+_TTF_MAC_HEADER = False + + +# TrueType Font Glyph operators +GF_WORDS = (1 << 0) +GF_SCALE = (1 << 3) +GF_MORE = (1 << 5) +GF_XYSCALE = (1 << 6) +GF_TWOBYTWO = (1 << 7) + + +def sub32(x, y): + xlo = x[1] + xhi = x[0] + ylo = y[1] + yhi = y[0] + if (ylo > xlo): + xlo += 1 << 16 + yhi += 1 + reslo = xlo-ylo + if (yhi > xhi): + xhi += 1 << 16 + reshi = xhi-yhi + reshi = reshi & 0xFFFF + return (reshi, reslo) + +def calcChecksum(data): + if (strlen(data) % 4): + data += str_repeat(b("\0"), (4-(len(data) % 4))) + hi=0x0000 + lo=0x0000 + for i in range(0, len(data), 4): + hi += (ord(data[i])<<8) + ord(data[i+1]) + lo += (ord(data[i+2])<<8) + ord(data[i+3]) + hi += lo >> 16 + lo = lo & 0xFFFF + hi = hi & 0xFFFF + return (hi, lo) + + +class TTFontFile: + + def __init__(self): + self.maxStrLenRead = 200000 # Maximum size of glyf table to read in as string (otherwise reads each glyph from file) + + def getMetrics(self, file): + self.filename = file + self.fh = open(file,'rb') + self._pos = 0 + self.charWidths = [] + self.glyphPos = {} + self.charToGlyph = {} + self.tables = {} + self.otables = {} + self.ascent = 0 + self.descent = 0 + self.TTCFonts = {} + self.version = version = self.read_ulong() + if (version==0x4F54544F): + die("Postscript outlines are not supported") + if (version==0x74746366): + die("ERROR - TrueType Fonts Collections not supported") + if (version not in (0x00010000,0x74727565)): + die("Not a TrueType font: version=" + version) + self.readTableDirectory() + self.extractInfo() + self.fh.close() + + def readTableDirectory(self, ): + self.numTables = self.read_ushort() + self.searchRange = self.read_ushort() + self.entrySelector = self.read_ushort() + self.rangeShift = self.read_ushort() + self.tables = {} + for i in range(self.numTables): + record = {} + record['tag'] = self.read_tag() + record['checksum'] = (self.read_ushort(),self.read_ushort()) + record['offset'] = self.read_ulong() + record['length'] = self.read_ulong() + self.tables[record['tag']] = record + + def get_table_pos(self, tag): + offset = self.tables[tag]['offset'] + length = self.tables[tag]['length'] + return (offset, length) + + def seek(self, pos): + self._pos = pos + self.fh.seek(self._pos) + + def skip(self, delta): + self._pos = self._pos + delta + self.fh.seek(self._pos) + + def seek_table(self, tag, offset_in_table = 0): + tpos = self.get_table_pos(tag) + self._pos = tpos[0] + offset_in_table + self.fh.seek(self._pos) + return self._pos + + def read_tag(self): + self._pos += 4 + return self.fh.read(4).decode("latin1") + + def read_short(self): + self._pos += 2 + s = self.fh.read(2) + a = (ord(s[0])<<8) + ord(s[1]) + if (a & (1 << 15) ): + a = (a - (1 << 16)) + return a + + def unpack_short(self, s): + a = (ord(s[0])<<8) + ord(s[1]) + if (a & (1 << 15) ): + a = (a - (1 << 16)) + return a + + def read_ushort(self): + self._pos += 2 + s = self.fh.read(2) + return (ord(s[0])<<8) + ord(s[1]) + + def read_ulong(self): + self._pos += 4 + s = self.fh.read(4) + # if large uInt32 as an integer, PHP converts it to -ve + return (ord(s[0])*16777216) + (ord(s[1])<<16) + (ord(s[2])<<8) + ord(s[3]) # 16777216 = 1<<24 + + def get_ushort(self, pos): + self.fh.seek(pos) + s = self.fh.read(2) + return (ord(s[0])<<8) + ord(s[1]) + + def get_ulong(self, pos): + self.fh.seek(pos) + s = self.fh.read(4) + # iF large uInt32 as an integer, PHP converts it to -ve + return (ord(s[0])*16777216) + (ord(s[1])<<16) + (ord(s[2])<<8) + ord(s[3]) # 16777216 = 1<<24 + + def pack_short(self, val): + if (val<0): + val = abs(val) + val = 
~val + val += 1 + return pack(">H",val) + + def splice(self, stream, offset, value): + return substr(stream,0,offset) + value + substr(stream,offset+strlen(value)) + + def _set_ushort(self, stream, offset, value): + up = pack(">H", value) + return self.splice(stream, offset, up) + + def _set_short(self, stream, offset, val): + if (val<0): + val = abs(val) + val = ~val + val += 1 + up = pack(">H",val) + return self.splice(stream, offset, up) + + def get_chunk(self, pos, length): + self.fh.seek(pos) + if (length <1): return '' + return (self.fh.read(length)) + + def get_table(self, tag): + (pos, length) = self.get_table_pos(tag) + if (length == 0): + die('Truetype font (' + self.filename + '): error reading table: ' + tag) + self.fh.seek(pos) + return (self.fh.read(length)) + + def add(self, tag, data): + if (tag == 'head') : + data = self.splice(data, 8, b("\0\0\0\0")) + self.otables[tag] = data + +############################################/ +############################################/ + +############################################/ + + def extractInfo(self): + #################/ + # name - Naming table + #################/ + self.sFamilyClass = 0 + self.sFamilySubClass = 0 + + name_offset = self.seek_table("name") + format = self.read_ushort() + if (format != 0): + die("Unknown name table format " + format) + numRecords = self.read_ushort() + string_data_offset = name_offset + self.read_ushort() + names = {1:'',2:'',3:'',4:'',6:''} + K = list(names.keys()) + nameCount = len(names) + for i in range(numRecords): + platformId = self.read_ushort() + encodingId = self.read_ushort() + languageId = self.read_ushort() + nameId = self.read_ushort() + length = self.read_ushort() + offset = self.read_ushort() + if (nameId not in K): continue + N = '' + if (platformId == 3 and encodingId == 1 and languageId == 0x409): # Microsoft, Unicode, US English, PS Name + opos = self._pos + self.seek(string_data_offset + offset) + if (length % 2 != 0): + die("PostScript name is UTF-16BE string of odd length") + length /= 2 + N = '' + while (length > 0): + char = self.read_ushort() + N += (chr(char)) + length -= 1 + self._pos = opos + self.seek(opos) + + elif (platformId == 1 and encodingId == 0 and languageId == 0): # Macintosh, Roman, English, PS Name + opos = self._pos + N = self.get_chunk(string_data_offset + offset, length).decode("latin1") + self._pos = opos + self.seek(opos) + + if (N and names[nameId]==''): + names[nameId] = N + nameCount -= 1 + if (nameCount==0): break + + + if (names[6]): + psName = names[6] + elif (names[4]): + psName = re.sub(' ','-',names[4]) + elif (names[1]): + psName = re.sub(' ','-',names[1]) + else: + psName = '' + if (not psName): + die("Could not find PostScript font name") + self.name = psName + if (names[1]): + self.familyName = names[1] + else: + self.familyName = psName + if (names[2]): + self.styleName = names[2] + else: + self.styleName = 'Regular' + if (names[4]): + self.fullName = names[4] + else: + self.fullName = psName + if (names[3]): + self.uniqueFontID = names[3] + else: + self.uniqueFontID = psName + if (names[6]): + self.fullName = names[6] + + #################/ + # head - Font header table + #################/ + self.seek_table("head") + self.skip(18) + self.unitsPerEm = unitsPerEm = self.read_ushort() + scale = 1000 / float(unitsPerEm) + self.skip(16) + xMin = self.read_short() + yMin = self.read_short() + xMax = self.read_short() + yMax = self.read_short() + self.bbox = [(xMin*scale), (yMin*scale), (xMax*scale), (yMax*scale)] + self.skip(3*2) + 
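# Illustrative note (TrueType spec, matching how makeSubset() rewrites 'loca'
# further below): indexToLocFormat selects the 'loca' offset encoding, where
# 0 means short offsets (uint16 storing offset/2) and 1 means long offsets (uint32).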
indexToLocFormat = self.read_ushort() + glyphDataFormat = self.read_ushort() + if (glyphDataFormat != 0): + die('Unknown glyph data format ' + glyphDataFormat) + + #################/ + # hhea metrics table + #################/ + # ttf2t1 seems to use this value rather than the one in OS/2 - so put in for compatibility + if ("hhea" in self.tables): + self.seek_table("hhea") + self.skip(4) + hheaAscender = self.read_short() + hheaDescender = self.read_short() + self.ascent = (hheaAscender *scale) + self.descent = (hheaDescender *scale) + + + #################/ + # OS/2 - OS/2 and Windows metrics table + #################/ + if ("OS/2" in self.tables): + self.seek_table("OS/2") + version = self.read_ushort() + self.skip(2) + usWeightClass = self.read_ushort() + self.skip(2) + fsType = self.read_ushort() + if (fsType == 0x0002 or (fsType & 0x0300) != 0): + die('ERROR - Font file ' + self.filename + ' cannot be embedded due to copyright restrictions.') + self.restrictedUse = True + + self.skip(20) + sF = self.read_short() + self.sFamilyClass = (sF >> 8) + self.sFamilySubClass = (sF & 0xFF) + self._pos += 10 #PANOSE = 10 byte length + panose = self.fh.read(10) + self.skip(26) + sTypoAscender = self.read_short() + sTypoDescender = self.read_short() + if (not self.ascent): + self.ascent = (sTypoAscender*scale) + if (not self.descent): + self.descent = (sTypoDescender*scale) + if (version > 1): + self.skip(16) + sCapHeight = self.read_short() + self.capHeight = (sCapHeight*scale) + else: + self.capHeight = self.ascent + + else: + usWeightClass = 500 + if (not self.ascent): self.ascent = (yMax*scale) + if (not self.descent): self.descent = (yMin*scale) + self.capHeight = self.ascent + + self.stemV = 50 + int(pow((usWeightClass / 65.0),2)) + + #################/ + # post - PostScript table + #################/ + self.seek_table("post") + self.skip(4) + self.italicAngle = self.read_short() + self.read_ushort() / 65536.0 + self.underlinePosition = self.read_short() * scale + self.underlineThickness = self.read_short() * scale + isFixedPitch = self.read_ulong() + + self.flags = 4 + + if (self.italicAngle!= 0): + self.flags = self.flags | 64 + if (usWeightClass >= 600): + self.flags = self.flags | 262144 + if (isFixedPitch): + self.flags = self.flags | 1 + + #################/ + # hhea - Horizontal header table + #################/ + self.seek_table("hhea") + self.skip(32) + metricDataFormat = self.read_ushort() + if (metricDataFormat != 0): + die('Unknown horizontal metric data format '.metricDataFormat) + numberOfHMetrics = self.read_ushort() + if (numberOfHMetrics == 0): + die('Number of horizontal metrics is 0') + + #################/ + # maxp - Maximum profile table + #################/ + self.seek_table("maxp") + self.skip(4) + numGlyphs = self.read_ushort() + + #################/ + # cmap - Character to glyph index mapping table + #################/ + cmap_offset = self.seek_table("cmap") + self.skip(2) + cmapTableCount = self.read_ushort() + unicode_cmap_offset = 0 + unicode_cmap_offset12 = 0 + + for i in range(cmapTableCount): + platformID = self.read_ushort() + encodingID = self.read_ushort() + offset = self.read_ulong() + save_pos = self._pos + if platformID == 3 and encodingID == 10: # Microsoft, UCS-4 + format = self.get_ushort(cmap_offset + offset) + if (format == 12): + if not unicode_cmap_offset12: + unicode_cmap_offset12 = cmap_offset + offset + break + if ((platformID == 3 and encodingID == 1) or platformID == 0): # Microsoft, Unicode + format = self.get_ushort(cmap_offset + offset) + 
if (format == 4): + if (not unicode_cmap_offset): + unicode_cmap_offset = cmap_offset + offset + break + + self.seek(save_pos) + + if not unicode_cmap_offset and not unicode_cmap_offset12: + die('Font (' + self.filename + ') does not have cmap for Unicode (platform 3, encoding 1, format 4, or platform 3, encoding 10, format 12, or platform 0, any encoding, format 4)') + + glyphToChar = {} + charToGlyph = {} + if unicode_cmap_offset12: + self.getCMAP12(unicode_cmap_offset12, glyphToChar, charToGlyph) + else: + self.getCMAP4(unicode_cmap_offset, glyphToChar, charToGlyph) + + #################/ + # hmtx - Horizontal metrics table + #################/ + self.getHMTX(numberOfHMetrics, numGlyphs, glyphToChar, scale) + + +############################################/ +############################################/ + + def makeSubset(self, file, subset): + self.filename = file + self.fh = open(file ,'rb') + self._pos = 0 + self.charWidths = [] + self.glyphPos = {} + self.charToGlyph = {} + self.tables = {} + self.otables = {} + self.ascent = 0 + self.descent = 0 + self.skip(4) + self.maxUni = 0 + self.readTableDirectory() + + #################/ + # head - Font header table + #################/ + self.seek_table("head") + self.skip(50) + indexToLocFormat = self.read_ushort() + glyphDataFormat = self.read_ushort() + + #################/ + # hhea - Horizontal header table + #################/ + self.seek_table("hhea") + self.skip(32) + metricDataFormat = self.read_ushort() + orignHmetrics = numberOfHMetrics = self.read_ushort() + + #################/ + # maxp - Maximum profile table + #################/ + self.seek_table("maxp") + self.skip(4) + numGlyphs = self.read_ushort() + + #################/ + # cmap - Character to glyph index mapping table + #################/ + cmap_offset = self.seek_table("cmap") + self.skip(2) + cmapTableCount = self.read_ushort() + unicode_cmap_offset = 0 + unicode_cmap_offset12 = 0 + for i in range(cmapTableCount): + platformID = self.read_ushort() + encodingID = self.read_ushort() + offset = self.read_ulong() + save_pos = self._pos + if platformID == 3 and encodingID == 10: # Microsoft, UCS-4 + format = self.get_ushort(cmap_offset + offset) + if (format == 12): + if not unicode_cmap_offset12: + unicode_cmap_offset12 = cmap_offset + offset + break + if ((platformID == 3 and encodingID == 1) or platformID == 0): # Microsoft, Unicode + format = self.get_ushort(cmap_offset + offset) + if (format == 4): + unicode_cmap_offset = cmap_offset + offset + break + + self.seek(save_pos ) + + if not unicode_cmap_offset and not unicode_cmap_offset12: + die('Font (' + self.filename + ') does not have cmap for Unicode (platform 3, encoding 1, format 4, or platform 3, encoding 10, format 12, or platform 0, any encoding, format 4)') + + glyphToChar = {} + charToGlyph = {} + if unicode_cmap_offset12: + self.getCMAP12(unicode_cmap_offset12, glyphToChar, charToGlyph) + else: + self.getCMAP4(unicode_cmap_offset, glyphToChar, charToGlyph) + + self.charToGlyph = charToGlyph + + #################/ + # hmtx - Horizontal metrics table + #################/ + scale = 1 # not used + self.getHMTX(numberOfHMetrics, numGlyphs, glyphToChar, scale) + + #################/ + # loca - Index to location + #################/ + self.getLOCA(indexToLocFormat, numGlyphs) + + subsetglyphs = [(0, 0)] # special "sorted dict"! 
+ subsetCharToGlyph = {} + for code in subset: + if (code in self.charToGlyph): + if (self.charToGlyph[code], code) not in subsetglyphs: + subsetglyphs.append((self.charToGlyph[code], code)) # Old Glyph ID => Unicode + subsetCharToGlyph[code] = self.charToGlyph[code] # Unicode to old GlyphID + self.maxUni = max(self.maxUni, code) + (start,dummy) = self.get_table_pos('glyf') + + subsetglyphs.sort() + glyphSet = {} + n = 0 + fsLastCharIndex = 0 # maximum Unicode index (character code) in this font, according to the cmap subtable for platform ID 3 and platform- specific encoding ID 0 or 1. + for originalGlyphIdx, uni in subsetglyphs: + fsLastCharIndex = max(fsLastCharIndex , uni) + glyphSet[originalGlyphIdx] = n # old glyphID to new glyphID + n += 1 + + codeToGlyph = {} + for uni, originalGlyphIdx in sorted(subsetCharToGlyph.items()): + codeToGlyph[uni] = glyphSet[originalGlyphIdx] + + self.codeToGlyph = codeToGlyph + + for originalGlyphIdx, uni in subsetglyphs: + nonlocals = {'start': start, 'glyphSet': glyphSet, + 'subsetglyphs': subsetglyphs} + self.getGlyphs(originalGlyphIdx, nonlocals) + + numGlyphs = numberOfHMetrics = len(subsetglyphs) + + #tables copied from the original + tags = ['name'] + for tag in tags: + self.add(tag, self.get_table(tag)) + tags = ['cvt ', 'fpgm', 'prep', 'gasp'] + for tag in tags: + if (tag in self.tables): + self.add(tag, self.get_table(tag)) + + # post - PostScript + opost = self.get_table('post') + post = b("\x00\x03\x00\x00") + substr(opost,4,12) + b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00") + self.add('post', post) + + # Sort CID2GID map into segments of contiguous codes + if 0 in codeToGlyph: + del codeToGlyph[0] + #unset(codeToGlyph[65535]) + rangeid = 0 + range_ = {} + prevcid = -2 + prevglidx = -1 + # for each character + for cid, glidx in sorted(codeToGlyph.items()): + if (cid == (prevcid + 1) and glidx == (prevglidx + 1)): + range_[rangeid].append(glidx) + else: + # new range + rangeid = cid + range_[rangeid] = [] + range_[rangeid].append(glidx) + prevcid = cid + prevglidx = glidx + + # cmap - Character to glyph mapping - Format 4 (MS / ) + segCount = len(range_) + 1 # + 1 Last segment has missing character 0xFFFF + searchRange = 1 + entrySelector = 0 + while (searchRange * 2 <= segCount ): + searchRange = searchRange * 2 + entrySelector = entrySelector + 1 + + searchRange = searchRange * 2 + rangeShift = segCount * 2 - searchRange + length = 16 + (8*segCount ) + (numGlyphs+1) + cmap = [0, 1, # Index : version, number of encoding subtables + 3, 1, # Encoding Subtable : platform (MS=3), encoding (Unicode) + 0, 12, # Encoding Subtable : offset (hi,lo) + 4, length, 0, # Format 4 Mapping subtable: format, length, language + segCount*2, + searchRange, + entrySelector, + rangeShift] + + range_ = sorted(range_.items()) + + # endCode(s) + for start, subrange in range_: + endCode = start + (len(subrange)-1) + cmap.append(endCode) # endCode(s) + + cmap.append(0xFFFF) # endCode of last Segment + cmap.append(0) # reservedPad + + # startCode(s) + for start, subrange in range_: + cmap.append(start) # startCode(s) + + cmap.append(0xFFFF) # startCode of last Segment + # idDelta(s) + for start, subrange in range_: + idDelta = -(start-subrange[0]) + n += count(subrange) + cmap.append(idDelta) # idDelta(s) + + cmap.append(1) # idDelta of last Segment + # idRangeOffset(s) + for subrange in range_: + cmap.append(0) # idRangeOffset[segCount] Offset in bytes to glyph indexArray, or 0 + + cmap.append(0) # idRangeOffset of last Segment + for 
subrange, glidx in range_: + cmap.extend(glidx) + + cmap.append(0) # Mapping for last character + cmapstr = b('') + for cm in cmap: + if cm >= 0: + cmapstr += pack(">H", cm) + else: + try: + cmapstr += pack(">h", cm) + except: + warnings.warn("cmap value too big/small: %s" % cm) + cmapstr += pack(">H", -cm) + self.add('cmap', cmapstr) + + # glyf - Glyph data + (glyfOffset,glyfLength) = self.get_table_pos('glyf') + if (glyfLength < self.maxStrLenRead): + glyphData = self.get_table('glyf') + + offsets = [] + glyf = b('') + pos = 0 + + hmtxstr = b('') + xMinT = 0 + yMinT = 0 + xMaxT = 0 + yMaxT = 0 + advanceWidthMax = 0 + minLeftSideBearing = 0 + minRightSideBearing = 0 + xMaxExtent = 0 + maxPoints = 0 # points in non-compound glyph + maxContours = 0 # contours in non-compound glyph + maxComponentPoints = 0 # points in compound glyph + maxComponentContours = 0 # contours in compound glyph + maxComponentElements = 0 # number of glyphs referenced at top level + maxComponentDepth = 0 # levels of recursion, set to 0 if font has only simple glyphs + self.glyphdata = {} + + for originalGlyphIdx, uni in subsetglyphs: + # hmtx - Horizontal Metrics + hm = self.getHMetric(orignHmetrics, originalGlyphIdx) + hmtxstr += hm + + offsets.append(pos) + try: + glyphPos = self.glyphPos[originalGlyphIdx] + glyphLen = self.glyphPos[originalGlyphIdx + 1] - glyphPos + except IndexError: + warnings.warn("missing glyph %s" % (originalGlyphIdx)) + glyphLen = 0 + + if (glyfLength < self.maxStrLenRead): + data = substr(glyphData,glyphPos,glyphLen) + else: + if (glyphLen > 0): + data = self.get_chunk(glyfOffset+glyphPos,glyphLen) + else: + data = b('') + + if (glyphLen > 0): + up = unpack(">H", substr(data,0,2))[0] + if (glyphLen > 2 and (up & (1 << 15)) ): # If number of contours <= -1 i.e. 
composiste glyph + pos_in_glyph = 10 + flags = GF_MORE + nComponentElements = 0 + while (flags & GF_MORE): + nComponentElements += 1 # number of glyphs referenced at top level + up = unpack(">H", substr(data,pos_in_glyph,2)) + flags = up[0] + up = unpack(">H", substr(data,pos_in_glyph+2,2)) + glyphIdx = up[0] + self.glyphdata.setdefault(originalGlyphIdx, {}).setdefault('compGlyphs', []).append(glyphIdx) + try: + data = self._set_ushort(data, pos_in_glyph + 2, glyphSet[glyphIdx]) + except KeyError: + data = 0 + warnings.warn("missing glyph data %s" % glyphIdx) + pos_in_glyph += 4 + if (flags & GF_WORDS): + pos_in_glyph += 4 + else: + pos_in_glyph += 2 + if (flags & GF_SCALE): + pos_in_glyph += 2 + elif (flags & GF_XYSCALE): + pos_in_glyph += 4 + elif (flags & GF_TWOBYTWO): + pos_in_glyph += 8 + + maxComponentElements = max(maxComponentElements, nComponentElements) + + glyf += data + pos += glyphLen + if (pos % 4 != 0): + padding = 4 - (pos % 4) + glyf += str_repeat(b("\0"),padding) + pos += padding + + offsets.append(pos) + self.add('glyf', glyf) + + # hmtx - Horizontal Metrics + self.add('hmtx', hmtxstr) + + # loca - Index to location + locastr = b('') + if (((pos + 1) >> 1) > 0xFFFF): + indexToLocFormat = 1 # long format + for offset in offsets: + locastr += pack(">L",offset) + else: + indexToLocFormat = 0 # short format + for offset in offsets: + locastr += pack(">H",int(offset/2)) + + self.add('loca', locastr) + + # head - Font header + head = self.get_table('head') + head = self._set_ushort(head, 50, indexToLocFormat) + self.add('head', head) + + # hhea - Horizontal Header + hhea = self.get_table('hhea') + hhea = self._set_ushort(hhea, 34, numberOfHMetrics) + self.add('hhea', hhea) + + # maxp - Maximum Profile + maxp = self.get_table('maxp') + maxp = self._set_ushort(maxp, 4, numGlyphs) + self.add('maxp', maxp) + + # OS/2 - OS/2 + os2 = self.get_table('OS/2') + self.add('OS/2', os2 ) + + self.fh.close() + + # Put the TTF file together + stm = self.endTTFile('') + return stm + + + ######################################### + # Recursively get composite glyph data + def getGlyphData(self, originalGlyphIdx, nonlocals): + # &maxdepth, &depth, &points, &contours + nonlocals['depth'] += 1 + nonlocals['maxdepth'] = max(nonlocals['maxdepth'], nonlocals['depth']) + if (len(self.glyphdata[originalGlyphIdx]['compGlyphs'])): + for glyphIdx in self.glyphdata[originalGlyphIdx]['compGlyphs']: + self.getGlyphData(glyphIdx, nonlocals) + + elif ((self.glyphdata[originalGlyphIdx]['nContours'] > 0) and nonlocals['depth'] > 0): # simple + contours += self.glyphdata[originalGlyphIdx]['nContours'] + points += self.glyphdata[originalGlyphIdx]['nPoints'] + + nonlocals['depth'] -= 1 + + + ######################################### + # Recursively get composite glyphs + def getGlyphs(self, originalGlyphIdx, nonlocals): + # &start, &glyphSet, &subsetglyphs) + + try: + glyphPos = self.glyphPos[originalGlyphIdx] + glyphLen = self.glyphPos[originalGlyphIdx + 1] - glyphPos + except IndexError: + warnings.warn("missing glyph %s" % (originalGlyphIdx)) + return + + if (not glyphLen): + return + + self.seek(nonlocals['start'] + glyphPos) + numberOfContours = self.read_short() + if (numberOfContours < 0): + self.skip(8) + flags = GF_MORE + while (flags & GF_MORE): + flags = self.read_ushort() + glyphIdx = self.read_ushort() + if (glyphIdx not in nonlocals['glyphSet']): + nonlocals['glyphSet'][glyphIdx] = len(nonlocals['subsetglyphs']) # old glyphID to new glyphID + nonlocals['subsetglyphs'].append((glyphIdx, 1)) + + savepos 
= self.fh.tell() + self.getGlyphs(glyphIdx, nonlocals) + self.seek(savepos) + if (flags & GF_WORDS): + self.skip(4) + else: + self.skip(2) + if (flags & GF_SCALE): + self.skip(2) + elif (flags & GF_XYSCALE): + self.skip(4) + elif (flags & GF_TWOBYTWO): + self.skip(8) + + ######################################### + + def getHMTX(self, numberOfHMetrics, numGlyphs, glyphToChar, scale): + start = self.seek_table("hmtx") + aw = 0 + self.charWidths = [0] * 256*256 + nCharWidths = 0 + if ((numberOfHMetrics*4) < self.maxStrLenRead): + data = self.get_chunk(start,(numberOfHMetrics*4)) + arr = unpack(">%dH" % (int(len(data)/2)), data) + else: + self.seek(start) + for glyph in range(numberOfHMetrics): + if ((numberOfHMetrics*4) < self.maxStrLenRead): + aw = arr[(glyph*2)] # PHP starts arrays from index 0!? +1 + else: + aw = self.read_ushort() + lsb = self.read_ushort() + + if (glyph in glyphToChar or glyph == 0): + if (aw >= (1 << 15) ): + aw = 0 # 1.03 Some (arabic) fonts have -ve values for width + # although should be unsigned value - comes out as e.g. 65108 (intended -50) + if (glyph == 0): + self.defaultWidth = scale*aw + continue + + for char in glyphToChar[glyph]: + if (char != 0 and char != 65535): + w = int(round(scale*aw+0.001)) # ROUND_HALF_UP in PY3K (like php) + if (w == 0): w = 65535 + if (char < 196608): + self.charWidths[char] = w + nCharWidths += 1 + + + data = self.get_chunk((start+numberOfHMetrics*4),(numGlyphs*2)) + arr = unpack(">%dH" % (int(len(data)/2)), data) + diff = numGlyphs-numberOfHMetrics + for pos in range(diff): + glyph = pos + numberOfHMetrics + if (glyph in glyphToChar): + for char in glyphToChar[glyph]: + if (char != 0 and char != 65535): + w = int(round(scale*aw+0.001)) # ROUND_HALF_UP in PY3K (like php) + if (w == 0): w = 65535 + if (char < 196608): + self.charWidths[char] = w + nCharWidths += 1 + + + # NB 65535 is a set width of 0 + # First bytes define number of chars in font + self.charWidths[0] = nCharWidths + + + def getHMetric(self, numberOfHMetrics, gid): + start = self.seek_table("hmtx") + if (gid < numberOfHMetrics): + self.seek(start+(gid*4)) + hm = self.fh.read(4) + else: + self.seek(start+((numberOfHMetrics-1)*4)) + hm = self.fh.read(2) + self.seek(start+(numberOfHMetrics*2)+(gid*2)) + hm += self.fh.read(2) + return hm + + + def getLOCA(self, indexToLocFormat, numGlyphs): + start = self.seek_table('loca') + self.glyphPos = [] + if (indexToLocFormat == 0): + data = self.get_chunk(start,(numGlyphs*2)+2) + arr = unpack(">%dH" % (int(len(data)/2)), data) + for n in range(numGlyphs): + self.glyphPos.append((arr[n] * 2)) # n+1 !? + elif (indexToLocFormat == 1): + data = self.get_chunk(start,(numGlyphs*4)+4) + arr = unpack(">%dL" % (int(len(data)/4)), data) + for n in range(numGlyphs): + self.glyphPos.append((arr[n])) # n+1 !? + else: + die('Unknown location table format ' + indexToLocFormat) + + # CMAP Format 4 + def getCMAP4(self, unicode_cmap_offset, glyphToChar, charToGlyph): + self.maxUniChar = 0 + self.seek(unicode_cmap_offset + 2) + length = self.read_ushort() + limit = unicode_cmap_offset + length + self.skip(2) + + segCount = int(self.read_ushort() / 2) + self.skip(6) + endCount = [] + for i in range(segCount): + endCount.append(self.read_ushort()) + self.skip(2) + startCount = [] + for i in range(segCount): + startCount.append(self.read_ushort()) + idDelta = [] + for i in range(segCount): + idDelta.append(self.read_short()) # ???? 
was unsigned short + idRangeOffset_start = self._pos + idRangeOffset = [] + for i in range(segCount): + idRangeOffset.append(self.read_ushort()) + + for n in range(segCount): + endpoint = (endCount[n] + 1) + for unichar in range(startCount[n], endpoint, 1): + if (idRangeOffset[n] == 0): + glyph = (unichar + idDelta[n]) & 0xFFFF + else: + offset = (unichar - startCount[n]) * 2 + idRangeOffset[n] + offset = idRangeOffset_start + 2 * n + offset + if (offset >= limit): + glyph = 0 + else: + glyph = self.get_ushort(offset) + if (glyph != 0): + glyph = (glyph + idDelta[n]) & 0xFFFF + + charToGlyph[unichar] = glyph + if (unichar < 196608): + self.maxUniChar = max(unichar,self.maxUniChar) + glyphToChar.setdefault(glyph, []).append(unichar) + + # CMAP Format 12 + def getCMAP12(self, unicode_cmap_offset, glyphToChar, charToGlyph): + self.maxUniChar = 0 + # table (skip format version, should be 12) + self.seek(unicode_cmap_offset + 2) + # reserved + self.skip(2) + # table length + length = self.read_ulong() + # language (should be 0) + self.skip(4) + # groups count + grpCount = self.read_ulong() + + if 2 + 2 + 4 + 4 + 4 + grpCount * 3 * 4 > length: + die("TTF format 12 cmap table too small") + for n in range(grpCount): + startCharCode = self.read_ulong() + endCharCode = self.read_ulong() + glyph = self.read_ulong() + for unichar in range(startCharCode, endCharCode + 1): + charToGlyph[unichar] = glyph + if (unichar < 196608): + self.maxUniChar = max(unichar, self.maxUniChar) + glyphToChar.setdefault(glyph, []).append(unichar) + glyph += 1 + + + + # Put the TTF file together + def endTTFile(self, stm): + stm = b('') + numTables = count(self.otables) + searchRange = 1 + entrySelector = 0 + while (searchRange * 2 <= numTables): + searchRange = searchRange * 2 + entrySelector = entrySelector + 1 + + searchRange = searchRange * 16 + rangeShift = numTables * 16 - searchRange + + # Header + if (_TTF_MAC_HEADER): + stm += (pack(">LHHHH", 0x74727565, numTables, searchRange, entrySelector, rangeShift)) # Mac + else: + stm += (pack(">LHHHH", 0x00010000 , numTables, searchRange, entrySelector, rangeShift)) # Windows + + + # Table directory + tables = self.otables + + offset = 12 + numTables * 16 + sorted_tables = sorted(tables.items()) + for tag, data in sorted_tables: + if (tag == 'head'): + head_start = offset + stm += tag.encode("latin1") + checksum = calcChecksum(data) + stm += pack(">HH", checksum[0],checksum[1]) + stm += pack(">LL", offset, strlen(data)) + paddedLength = (strlen(data)+3)&~3 + offset = offset + paddedLength + + # Table data + for tag, data in sorted_tables: + data += b("\0\0\0") + stm += substr(data,0,(strlen(data)&~3)) + + checksum = calcChecksum(stm) + checksum = sub32((0xB1B0,0xAFBA), checksum) + chk = pack(">HH", checksum[0],checksum[1]) + stm = self.splice(stm,(head_start + 8),chk) + return stm + diff --git a/lib/python3.12/site-packages/jinja2/__init__.py b/lib/python3.12/site-packages/jinja2/__init__.py new file mode 100644 index 0000000..af5d428 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/__init__.py @@ -0,0 +1,37 @@ +"""Jinja is a template engine written in pure Python. It provides a +non-XML syntax that supports inline expressions and an optional +sandboxed environment. 
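A minimal sketch of the engine this module exposes, assuming nothing beyond the public API listed in the imports that follow; the template string and variable names are illustrative only:

    from jinja2 import Template

    # An inline expression ({{ ... }}) plus a statement block ({% ... %}).
    greeting = Template("Hello {{ user }}! {% if admin %}You are an admin.{% endif %}")
    print(greeting.render(user="Ada", admin=True))  # Hello Ada! You are an admin.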
+""" +from .bccache import BytecodeCache as BytecodeCache +from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache +from .environment import Environment as Environment +from .environment import Template as Template +from .exceptions import TemplateAssertionError as TemplateAssertionError +from .exceptions import TemplateError as TemplateError +from .exceptions import TemplateNotFound as TemplateNotFound +from .exceptions import TemplateRuntimeError as TemplateRuntimeError +from .exceptions import TemplatesNotFound as TemplatesNotFound +from .exceptions import TemplateSyntaxError as TemplateSyntaxError +from .exceptions import UndefinedError as UndefinedError +from .loaders import BaseLoader as BaseLoader +from .loaders import ChoiceLoader as ChoiceLoader +from .loaders import DictLoader as DictLoader +from .loaders import FileSystemLoader as FileSystemLoader +from .loaders import FunctionLoader as FunctionLoader +from .loaders import ModuleLoader as ModuleLoader +from .loaders import PackageLoader as PackageLoader +from .loaders import PrefixLoader as PrefixLoader +from .runtime import ChainableUndefined as ChainableUndefined +from .runtime import DebugUndefined as DebugUndefined +from .runtime import make_logging_undefined as make_logging_undefined +from .runtime import StrictUndefined as StrictUndefined +from .runtime import Undefined as Undefined +from .utils import clear_caches as clear_caches +from .utils import is_undefined as is_undefined +from .utils import pass_context as pass_context +from .utils import pass_environment as pass_environment +from .utils import pass_eval_context as pass_eval_context +from .utils import select_autoescape as select_autoescape + +__version__ = "3.1.3" diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..78aab9c Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/__init__.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc new file mode 100644 index 0000000..09a5819 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/_identifier.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc new file mode 100644 index 0000000..a9ab77c Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/async_utils.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc new file mode 100644 index 0000000..f67d5b2 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/bccache.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc new file mode 100644 index 0000000..2cc23f8 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/compiler.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc 
new file mode 100644 index 0000000..b2b9be4 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/constants.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc new file mode 100644 index 0000000..eda48bc Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/debug.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc new file mode 100644 index 0000000..1831bba Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/defaults.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc new file mode 100644 index 0000000..8fc155d Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/environment.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..4618797 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/exceptions.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc new file mode 100644 index 0000000..631aefa Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/ext.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc new file mode 100644 index 0000000..658285a Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/filters.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc new file mode 100644 index 0000000..92317ba Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/idtracking.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc new file mode 100644 index 0000000..88bf309 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/lexer.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc new file mode 100644 index 0000000..de3f1c6 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/loaders.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc new file mode 100644 index 0000000..c26e1cd Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/meta.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc new file mode 100644 index 0000000..f54b42c Binary files /dev/null and 
b/lib/python3.12/site-packages/jinja2/__pycache__/nativetypes.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc new file mode 100644 index 0000000..c0efb2c Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/nodes.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc new file mode 100644 index 0000000..779bd8f Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/optimizer.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc new file mode 100644 index 0000000..86b0210 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/parser.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc new file mode 100644 index 0000000..3fc3ce6 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/runtime.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc new file mode 100644 index 0000000..3e8a607 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/sandbox.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc new file mode 100644 index 0000000..c8316b4 Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/tests.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..014211d Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/utils.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc b/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc new file mode 100644 index 0000000..3e95e8e Binary files /dev/null and b/lib/python3.12/site-packages/jinja2/__pycache__/visitor.cpython-312.pyc differ diff --git a/lib/python3.12/site-packages/jinja2/_identifier.py b/lib/python3.12/site-packages/jinja2/_identifier.py new file mode 100644 index 0000000..928c150 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/_identifier.py @@ -0,0 +1,6 @@ +import re + +# generated by scripts/generate_identifier_pattern.py +pattern = re.compile( + 
r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 +) diff --git a/lib/python3.12/site-packages/jinja2/async_utils.py b/lib/python3.12/site-packages/jinja2/async_utils.py new file mode 100644 index 0000000..715d701 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/async_utils.py @@ -0,0 +1,84 @@ +import inspect +import typing as t +from functools import WRAPPER_ASSIGNMENTS +from functools import wraps + +from .utils import _PassArg +from .utils import pass_eval_context + +V = t.TypeVar("V") + + +def async_variant(normal_func): # type: ignore + def decorator(async_func): # type: ignore + pass_arg = _PassArg.from_obj(normal_func) + need_eval_context = pass_arg is None + + if pass_arg is _PassArg.environment: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].is_async) + + else: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].environment.is_async) + + # Take the doc and annotations from the sync function, but the + # name from the async function. Pallets-Sphinx-Themes + # build_function_directive expects __wrapped__ to point to the + # sync function. + async_func_attrs = ("__module__", "__name__", "__qualname__") + normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) + + @wraps(normal_func, assigned=normal_func_attrs) + @wraps(async_func, assigned=async_func_attrs, updated=()) + def wrapper(*args, **kwargs): # type: ignore + b = is_async(args) + + if need_eval_context: + args = args[1:] + + if b: + return async_func(*args, **kwargs) + + return normal_func(*args, **kwargs) + + if need_eval_context: + wrapper = pass_eval_context(wrapper) + + wrapper.jinja_async_variant = True + return wrapper + + return decorator + + +_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} + + +async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": + # Avoid a costly call to isawaitable + if type(value) in _common_primitives: + return t.cast("V", value) + + if inspect.isawaitable(value): + return await t.cast("t.Awaitable[V]", value) + + return t.cast("V", value) + + +async def auto_aiter( + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> "t.AsyncIterator[V]": + if hasattr(iterable, "__aiter__"): + async for item in t.cast("t.AsyncIterable[V]", iterable): + yield item + else: + for item in iterable: + yield item + + +async def auto_to_list( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> t.List["V"]: + return [x async for x in auto_aiter(value)] diff --git a/lib/python3.12/site-packages/jinja2/bccache.py b/lib/python3.12/site-packages/jinja2/bccache.py new file mode 100644 index 0000000..d0ddf56 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/bccache.py @@ -0,0 +1,406 @@ +"""The optional bytecode cache system. 
This is useful if you have very +complex template situations and the compilation of all those templates +slows down your application too much. + +Situations where this is useful are often forking web applications that +are initialized on the first request. +""" +import errno +import fnmatch +import marshal +import os +import pickle +import stat +import sys +import tempfile +import typing as t +from hashlib import sha1 +from io import BytesIO +from types import CodeType + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + + class _MemcachedClient(te.Protocol): + def get(self, key: str) -> bytes: + ... + + def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None: + ... + + +bc_version = 5 +# Magic bytes to identify Jinja bytecode cache files. Contains the +# Python major and minor version to avoid loading incompatible bytecode +# if a project upgrades its Python version. +bc_magic = ( + b"j2" + + pickle.dumps(bc_version, 2) + + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) +) + + +class Bucket: + """Buckets are used to store the bytecode for one template. It's created + and initialized by the bytecode cache and passed to the loading functions. + + The buckets get an internal checksum from the cache assigned and use this + to automatically reject outdated cache material. Individual bytecode + cache subclasses don't have to care about cache invalidation. + """ + + def __init__(self, environment: "Environment", key: str, checksum: str) -> None: + self.environment = environment + self.key = key + self.checksum = checksum + self.reset() + + def reset(self) -> None: + """Resets the bucket (unloads the bytecode).""" + self.code: t.Optional[CodeType] = None + + def load_bytecode(self, f: t.BinaryIO) -> None: + """Loads bytecode from a file or file like object.""" + # make sure the magic header is correct + magic = f.read(len(bc_magic)) + if magic != bc_magic: + self.reset() + return + # the source code of the file changed, we need to reload + checksum = pickle.load(f) + if self.checksum != checksum: + self.reset() + return + # if marshal_load fails then we need to reload + try: + self.code = marshal.load(f) + except (EOFError, ValueError, TypeError): + self.reset() + return + + def write_bytecode(self, f: t.IO[bytes]) -> None: + """Dump the bytecode into the file or file like object passed.""" + if self.code is None: + raise TypeError("can't write empty bucket") + f.write(bc_magic) + pickle.dump(self.checksum, f, 2) + marshal.dump(self.code, f) + + def bytecode_from_string(self, string: bytes) -> None: + """Load bytecode from bytes.""" + self.load_bytecode(BytesIO(string)) + + def bytecode_to_string(self) -> bytes: + """Return the bytecode as bytes.""" + out = BytesIO() + self.write_bytecode(out) + return out.getvalue() + + +class BytecodeCache: + """To implement your own bytecode cache you have to subclass this class + and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of + these methods are passed a :class:`~jinja2.bccache.Bucket`. 
+ + A very basic bytecode cache that saves the bytecode on the file system:: + + from os import path + + class MyCache(BytecodeCache): + + def __init__(self, directory): + self.directory = directory + + def load_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + if path.exists(filename): + with open(filename, 'rb') as f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + with open(filename, 'wb') as f: + bucket.write_bytecode(f) + + A more advanced version of a filesystem based bytecode cache is part of + Jinja. + """ + + def load_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to load bytecode into a + bucket. If they are not able to find code in the cache for the + bucket, it must not do anything. + """ + raise NotImplementedError() + + def dump_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to write the bytecode + from a bucket back to the cache. If it unable to do so it must not + fail silently but raise an exception. + """ + raise NotImplementedError() + + def clear(self) -> None: + """Clears the cache. This method is not used by Jinja but should be + implemented to allow applications to clear the bytecode cache used + by a particular environment. + """ + + def get_cache_key( + self, name: str, filename: t.Optional[t.Union[str]] = None + ) -> str: + """Returns the unique hash key for this template name.""" + hash = sha1(name.encode("utf-8")) + + if filename is not None: + hash.update(f"|{filename}".encode()) + + return hash.hexdigest() + + def get_source_checksum(self, source: str) -> str: + """Returns a checksum for the source.""" + return sha1(source.encode("utf-8")).hexdigest() + + def get_bucket( + self, + environment: "Environment", + name: str, + filename: t.Optional[str], + source: str, + ) -> Bucket: + """Return a cache bucket for the given template. All arguments are + mandatory but filename may be `None`. + """ + key = self.get_cache_key(name, filename) + checksum = self.get_source_checksum(source) + bucket = Bucket(environment, key, checksum) + self.load_bytecode(bucket) + return bucket + + def set_bucket(self, bucket: Bucket) -> None: + """Put the bucket into the cache.""" + self.dump_bytecode(bucket) + + +class FileSystemBytecodeCache(BytecodeCache): + """A bytecode cache that stores bytecode on the filesystem. It accepts + two arguments: The directory where the cache items are stored and a + pattern string that is used to build the filename. + + If no directory is specified a default cache directory is selected. On + Windows the user's temp directory is used, on UNIX systems a directory + is created for the user in the system temp directory. + + The pattern can be used to have multiple separate caches operate on the + same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` + is replaced with the cache key. + + >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') + + This bytecode cache supports clearing of the cache using the clear method. + """ + + def __init__( + self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" + ) -> None: + if directory is None: + directory = self._get_default_cache_dir() + self.directory = directory + self.pattern = pattern + + def _get_default_cache_dir(self) -> str: + def _unsafe_dir() -> "te.NoReturn": + raise RuntimeError( + "Cannot determine safe temp directory. You " + "need to explicitly provide one." 
+ ) + + tmpdir = tempfile.gettempdir() + + # On windows the temporary directory is used specific unless + # explicitly forced otherwise. We can just use that. + if os.name == "nt": + return tmpdir + if not hasattr(os, "getuid"): + _unsafe_dir() + + dirname = f"_jinja2-cache-{os.getuid()}" + actual_dir = os.path.join(tmpdir, dirname) + + try: + os.mkdir(actual_dir, stat.S_IRWXU) + except OSError as e: + if e.errno != errno.EEXIST: + raise + try: + os.chmod(actual_dir, stat.S_IRWXU) + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + except OSError as e: + if e.errno != errno.EEXIST: + raise + + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + + return actual_dir + + def _get_cache_filename(self, bucket: Bucket) -> str: + return os.path.join(self.directory, self.pattern % (bucket.key,)) + + def load_bytecode(self, bucket: Bucket) -> None: + filename = self._get_cache_filename(bucket) + + # Don't test for existence before opening the file, since the + # file could disappear after the test before the open. + try: + f = open(filename, "rb") + except (FileNotFoundError, IsADirectoryError, PermissionError): + # PermissionError can occur on Windows when an operation is + # in progress, such as calling clear(). + return + + with f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket: Bucket) -> None: + # Write to a temporary file, then rename to the real name after + # writing. This avoids another process reading the file before + # it is fully written. + name = self._get_cache_filename(bucket) + f = tempfile.NamedTemporaryFile( + mode="wb", + dir=os.path.dirname(name), + prefix=os.path.basename(name), + suffix=".tmp", + delete=False, + ) + + def remove_silent() -> None: + try: + os.remove(f.name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + pass + + try: + with f: + bucket.write_bytecode(f) + except BaseException: + remove_silent() + raise + + try: + os.replace(f.name, name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + remove_silent() + except BaseException: + remove_silent() + raise + + def clear(self) -> None: + # imported lazily here because google app-engine doesn't support + # write access on the file system and the function does not exist + # normally. + from os import remove + + files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) + for filename in files: + try: + remove(os.path.join(self.directory, filename)) + except OSError: + pass + + +class MemcachedBytecodeCache(BytecodeCache): + """This class implements a bytecode cache that uses a memcache cache for + storing the information. It does not enforce a specific memcache library + (tummy's memcache or cmemcache) but will accept any class that provides + the minimal interface required. + + Libraries compatible with this class: + + - `cachelib `_ + - `python-memcached `_ + + (Unfortunately the django cache interface is not compatible because it + does not support storing binary data, only text. You can however pass + the underlying cache client to the bytecode cache which is available + as `django.core.cache.cache._client`.) 
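A short usage sketch showing how either cache class is attached to an environment; the loader path, cache directory, and template name are placeholders rather than values taken from this repository:

    from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache

    # Compiled template bytecode is persisted in the cache directory, so a
    # later process can skip recompiling templates whose source is unchanged.
    env = Environment(
        loader=FileSystemLoader("templates"),
        bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache", "%s.cache"),
    )
    page = env.get_template("page.html")   # placeholder template name
    html = page.render(title="Example")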
+ + The minimal interface for the client passed to the constructor is this: + + .. class:: MinimalClientInterface + + .. method:: set(key, value[, timeout]) + + Stores the bytecode in the cache. `value` is a string and + `timeout` the timeout of the key. If timeout is not provided + a default timeout or no timeout should be assumed, if it's + provided it's an integer with the number of seconds the cache + item should exist. + + .. method:: get(key) + + Returns the value for the cache key. If the item does not + exist in the cache the return value must be `None`. + + The other arguments to the constructor are the prefix for all keys that + is added before the actual cache key and the timeout for the bytecode in + the cache system. We recommend a high (or no) timeout. + + This bytecode cache does not support clearing of used items in the cache. + The clear method is a no-operation function. + + .. versionadded:: 2.7 + Added support for ignoring memcache errors through the + `ignore_memcache_errors` parameter. + """ + + def __init__( + self, + client: "_MemcachedClient", + prefix: str = "jinja2/bytecode/", + timeout: t.Optional[int] = None, + ignore_memcache_errors: bool = True, + ): + self.client = client + self.prefix = prefix + self.timeout = timeout + self.ignore_memcache_errors = ignore_memcache_errors + + def load_bytecode(self, bucket: Bucket) -> None: + try: + code = self.client.get(self.prefix + bucket.key) + except Exception: + if not self.ignore_memcache_errors: + raise + else: + bucket.bytecode_from_string(code) + + def dump_bytecode(self, bucket: Bucket) -> None: + key = self.prefix + bucket.key + value = bucket.bytecode_to_string() + + try: + if self.timeout is not None: + self.client.set(key, value, self.timeout) + else: + self.client.set(key, value) + except Exception: + if not self.ignore_memcache_errors: + raise diff --git a/lib/python3.12/site-packages/jinja2/compiler.py b/lib/python3.12/site-packages/jinja2/compiler.py new file mode 100644 index 0000000..ff95c80 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/compiler.py @@ -0,0 +1,1956 @@ +"""Compiles nodes from the parser into Python code.""" +import typing as t +from contextlib import contextmanager +from functools import update_wrapper +from io import StringIO +from itertools import chain +from keyword import iskeyword as is_python_keyword + +from markupsafe import escape +from markupsafe import Markup + +from . 
import nodes +from .exceptions import TemplateAssertionError +from .idtracking import Symbols +from .idtracking import VAR_LOAD_ALIAS +from .idtracking import VAR_LOAD_PARAMETER +from .idtracking import VAR_LOAD_RESOLVE +from .idtracking import VAR_LOAD_UNDEFINED +from .nodes import EvalContext +from .optimizer import Optimizer +from .utils import _PassArg +from .utils import concat +from .visitor import NodeVisitor + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +operators = { + "eq": "==", + "ne": "!=", + "gt": ">", + "gteq": ">=", + "lt": "<", + "lteq": "<=", + "in": "in", + "notin": "not in", +} + + +def optimizeconst(f: F) -> F: + def new_func( + self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any + ) -> t.Any: + # Only optimize if the frame is not volatile + if self.optimizer is not None and not frame.eval_ctx.volatile: + new_node = self.optimizer.visit(node, frame.eval_ctx) + + if new_node != node: + return self.visit(new_node, frame) + + return f(self, node, frame, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_binops # type: ignore + ): + self.write(f"environment.call_binop(context, {op!r}, ") + self.visit(node.left, frame) + self.write(", ") + self.visit(node.right, frame) + else: + self.write("(") + self.visit(node.left, frame) + self.write(f" {op} ") + self.visit(node.right, frame) + + self.write(")") + + return visitor + + +def _make_unop( + op: str, +) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_unops # type: ignore + ): + self.write(f"environment.call_unop(context, {op!r}, ") + self.visit(node.node, frame) + else: + self.write("(" + op) + self.visit(node.node, frame) + + self.write(")") + + return visitor + + +def generate( + node: nodes.Template, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, +) -> t.Optional[str]: + """Generate the python source for a node tree.""" + if not isinstance(node, nodes.Template): + raise TypeError("Can't compile non template nodes") + + generator = environment.code_generator_class( + environment, name, filename, stream, defer_init, optimized + ) + generator.visit(node) + + if stream is None: + return generator.stream.getvalue() # type: ignore + + return None + + +def has_safe_repr(value: t.Any) -> bool: + """Does the node have a safe representation?""" + if value is None or value is NotImplemented or value is Ellipsis: + return True + + if type(value) in {bool, int, float, complex, range, str, Markup}: + return True + + if type(value) in {tuple, list, set, frozenset}: + return all(has_safe_repr(v) for v in value) + + if type(value) is dict: + return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) + + return False + + +def find_undeclared( + nodes: t.Iterable[nodes.Node], names: t.Iterable[str] +) -> t.Set[str]: + """Check if the names passed are accessed undeclared. 
The return value + is a set of all the undeclared names from the sequence of names found. + """ + visitor = UndeclaredNameVisitor(names) + try: + for node in nodes: + visitor.visit(node) + except VisitorExit: + pass + return visitor.undeclared + + +class MacroRef: + def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: + self.node = node + self.accesses_caller = False + self.accesses_kwargs = False + self.accesses_varargs = False + + +class Frame: + """Holds compile time information for us.""" + + def __init__( + self, + eval_ctx: EvalContext, + parent: t.Optional["Frame"] = None, + level: t.Optional[int] = None, + ) -> None: + self.eval_ctx = eval_ctx + + # the parent of this frame + self.parent = parent + + if parent is None: + self.symbols = Symbols(level=level) + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = False + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer: t.Optional[str] = None + + # the name of the block we're in, otherwise None. + self.block: t.Optional[str] = None + + else: + self.symbols = Symbols(parent.symbols, level=level) + self.require_output_check = parent.require_output_check + self.buffer = parent.buffer + self.block = parent.block + + # a toplevel frame is the root + soft frames such as if conditions. + self.toplevel = False + + # the root frame is basically just the outermost frame, so no if + # conditions. This information is used to optimize inheritance + # situations. + self.rootlevel = False + + # variables set inside of loops and blocks should not affect outer frames, + # but they still needs to be kept track of as part of the active context. + self.loop_frame = False + self.block_frame = False + + # track whether the frame is being used in an if-statement or conditional + # expression as it determines which errors should be raised during runtime + # or compile time. + self.soft_frame = False + + def copy(self) -> "Frame": + """Create a copy of the current one.""" + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.symbols = self.symbols.copy() + return rv + + def inner(self, isolated: bool = False) -> "Frame": + """Return an inner frame.""" + if isolated: + return Frame(self.eval_ctx, level=self.symbols.level + 1) + return Frame(self.eval_ctx, self) + + def soft(self) -> "Frame": + """Return a soft frame. A soft frame may not be modified as + standalone thing as it shares the resources with the frame it + was created of, but it's not a rootlevel frame any longer. + + This is only used to implement if-statements and conditional + expressions. 
+ """ + rv = self.copy() + rv.rootlevel = False + rv.soft_frame = True + return rv + + __copy__ = copy + + +class VisitorExit(RuntimeError): + """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" + + +class DependencyFinderVisitor(NodeVisitor): + """A visitor that collects filter and test calls.""" + + def __init__(self) -> None: + self.filters: t.Set[str] = set() + self.tests: t.Set[str] = set() + + def visit_Filter(self, node: nodes.Filter) -> None: + self.generic_visit(node) + self.filters.add(node.name) + + def visit_Test(self, node: nodes.Test) -> None: + self.generic_visit(node) + self.tests.add(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting at blocks.""" + + +class UndeclaredNameVisitor(NodeVisitor): + """A visitor that checks if a name is accessed without being + declared. This is different from the frame visitor as it will + not stop at closure frames. + """ + + def __init__(self, names: t.Iterable[str]) -> None: + self.names = set(names) + self.undeclared: t.Set[str] = set() + + def visit_Name(self, node: nodes.Name) -> None: + if node.ctx == "load" and node.name in self.names: + self.undeclared.add(node.name) + if self.undeclared == self.names: + raise VisitorExit() + else: + self.names.discard(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting a blocks.""" + + +class CompilerExit(Exception): + """Raised if the compiler encountered a situation where it just + doesn't make sense to further process the code. Any block that + raises such an exception is not further processed. + """ + + +class CodeGenerator(NodeVisitor): + def __init__( + self, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, + ) -> None: + if stream is None: + stream = StringIO() + self.environment = environment + self.name = name + self.filename = filename + self.stream = stream + self.created_block_context = False + self.defer_init = defer_init + self.optimizer: t.Optional[Optimizer] = None + + if optimized: + self.optimizer = Optimizer(environment) + + # aliases for imports + self.import_aliases: t.Dict[str, str] = {} + + # a registry for all blocks. Because blocks are moved out + # into the global python scope they are registered here + self.blocks: t.Dict[str, nodes.Block] = {} + + # the number of extends statements so far + self.extends_so_far = 0 + + # some templates have a rootlevel extends. In this case we + # can safely assume that we're a child template and do some + # more optimizations. + self.has_known_extends = False + + # the current line number + self.code_lineno = 1 + + # registry of all filters and tests (global, not block local) + self.tests: t.Dict[str, str] = {} + self.filters: t.Dict[str, str] = {} + + # the debug information + self.debug_info: t.List[t.Tuple[int, int]] = [] + self._write_debug_info: t.Optional[int] = None + + # the number of new lines before the next write() + self._new_lines = 0 + + # the line number of the last written statement + self._last_line = 0 + + # true if nothing was written so far. 
+ self._first_write = True + + # used by the `temporary_identifier` method to get new + # unique, temporary identifier + self._last_identifier = 0 + + # the current indentation + self._indentation = 0 + + # Tracks toplevel assignments + self._assign_stack: t.List[t.Set[str]] = [] + + # Tracks parameter definition blocks + self._param_def_block: t.List[t.Set[str]] = [] + + # Tracks the current context. + self._context_reference_stack = ["context"] + + @property + def optimized(self) -> bool: + return self.optimizer is not None + + # -- Various compilation helpers + + def fail(self, msg: str, lineno: int) -> "te.NoReturn": + """Fail with a :exc:`TemplateAssertionError`.""" + raise TemplateAssertionError(msg, lineno, self.name, self.filename) + + def temporary_identifier(self) -> str: + """Get a new unique identifier.""" + self._last_identifier += 1 + return f"t_{self._last_identifier}" + + def buffer(self, frame: Frame) -> None: + """Enable buffering for the frame from that point onwards.""" + frame.buffer = self.temporary_identifier() + self.writeline(f"{frame.buffer} = []") + + def return_buffer_contents( + self, frame: Frame, force_unescaped: bool = False + ) -> None: + """Return the buffer contents of the frame.""" + if not force_unescaped: + if frame.eval_ctx.volatile: + self.writeline("if context.eval_ctx.autoescape:") + self.indent() + self.writeline(f"return Markup(concat({frame.buffer}))") + self.outdent() + self.writeline("else:") + self.indent() + self.writeline(f"return concat({frame.buffer})") + self.outdent() + return + elif frame.eval_ctx.autoescape: + self.writeline(f"return Markup(concat({frame.buffer}))") + return + self.writeline(f"return concat({frame.buffer})") + + def indent(self) -> None: + """Indent by one.""" + self._indentation += 1 + + def outdent(self, step: int = 1) -> None: + """Outdent by step.""" + self._indentation -= step + + def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: + """Yield or write into the frame buffer.""" + if frame.buffer is None: + self.writeline("yield ", node) + else: + self.writeline(f"{frame.buffer}.append(", node) + + def end_write(self, frame: Frame) -> None: + """End the writing process started by `start_write`.""" + if frame.buffer is not None: + self.write(")") + + def simple_write( + self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None + ) -> None: + """Simple shortcut for start_write + write + end_write.""" + self.start_write(frame, node) + self.write(s) + self.end_write(frame) + + def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: + """Visit a list of nodes as block in a frame. If the current frame + is no buffer a dummy ``if 0: yield None`` is written automatically. 
+ """ + try: + self.writeline("pass") + for node in nodes: + self.visit(node, frame) + except CompilerExit: + pass + + def write(self, x: str) -> None: + """Write a string into the output stream.""" + if self._new_lines: + if not self._first_write: + self.stream.write("\n" * self._new_lines) + self.code_lineno += self._new_lines + if self._write_debug_info is not None: + self.debug_info.append((self._write_debug_info, self.code_lineno)) + self._write_debug_info = None + self._first_write = False + self.stream.write(" " * self._indentation) + self._new_lines = 0 + self.stream.write(x) + + def writeline( + self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 + ) -> None: + """Combination of newline and write.""" + self.newline(node, extra) + self.write(x) + + def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: + """Add one or more newlines before the next write.""" + self._new_lines = max(self._new_lines, 1 + extra) + if node is not None and node.lineno != self._last_line: + self._write_debug_info = node.lineno + self._last_line = node.lineno + + def signature( + self, + node: t.Union[nodes.Call, nodes.Filter, nodes.Test], + frame: Frame, + extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> None: + """Writes a function call to the stream for the current node. + A leading comma is added automatically. The extra keyword + arguments may not include python keywords otherwise a syntax + error could occur. The extra keyword arguments should be given + as python dict. + """ + # if any of the given keyword arguments is a python keyword + # we have to make sure that no invalid call is created. + kwarg_workaround = any( + is_python_keyword(t.cast(str, k)) + for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) + ) + + for arg in node.args: + self.write(", ") + self.visit(arg, frame) + + if not kwarg_workaround: + for kwarg in node.kwargs: + self.write(", ") + self.visit(kwarg, frame) + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f", {key}={value}") + if node.dyn_args: + self.write(", *") + self.visit(node.dyn_args, frame) + + if kwarg_workaround: + if node.dyn_kwargs is not None: + self.write(", **dict({") + else: + self.write(", **{") + for kwarg in node.kwargs: + self.write(f"{kwarg.key!r}: ") + self.visit(kwarg.value, frame) + self.write(", ") + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f"{key!r}: {value}, ") + if node.dyn_kwargs is not None: + self.write("}, **") + self.visit(node.dyn_kwargs, frame) + self.write(")") + else: + self.write("}") + + elif node.dyn_kwargs is not None: + self.write(", **") + self.visit(node.dyn_kwargs, frame) + + def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: + """Find all filter and test names used in the template and + assign them to variables in the compiled namespace. Checking + that the names are registered with the environment is done when + compiling the Filter and Test nodes. If the node is in an If or + CondExpr node, the check is done at runtime instead. + + .. versionchanged:: 3.0 + Filters and tests in If and CondExpr nodes are checked at + runtime instead of compile time. 
+ """ + visitor = DependencyFinderVisitor() + + for node in nodes: + visitor.visit(node) + + for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( + self.tests, + visitor.tests, + "tests", + ): + for name in sorted(names): + if name not in id_map: + id_map[name] = self.temporary_identifier() + + # add check during runtime that dependencies used inside of executed + # blocks are defined, as this step may be skipped during compile time + self.writeline("try:") + self.indent() + self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") + self.outdent() + self.writeline("except KeyError:") + self.indent() + self.writeline("@internalcode") + self.writeline(f"def {id_map[name]}(*unused):") + self.indent() + self.writeline( + f'raise TemplateRuntimeError("No {dependency[:-1]}' + f' named {name!r} found.")' + ) + self.outdent() + self.outdent() + + def enter_frame(self, frame: Frame) -> None: + undefs = [] + for target, (action, param) in frame.symbols.loads.items(): + if action == VAR_LOAD_PARAMETER: + pass + elif action == VAR_LOAD_RESOLVE: + self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") + elif action == VAR_LOAD_ALIAS: + self.writeline(f"{target} = {param}") + elif action == VAR_LOAD_UNDEFINED: + undefs.append(target) + else: + raise NotImplementedError("unknown load instruction") + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: + if not with_python_scope: + undefs = [] + for target in frame.symbols.loads: + undefs.append(target) + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: + return async_value if self.environment.is_async else sync_value + + def func(self, name: str) -> str: + return f"{self.choose_async()}def {name}" + + def macro_body( + self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame + ) -> t.Tuple[Frame, MacroRef]: + """Dump the function def of a macro or call block.""" + frame = frame.inner() + frame.symbols.analyze_node(node) + macro_ref = MacroRef(node) + + explicit_caller = None + skip_special_params = set() + args = [] + + for idx, arg in enumerate(node.args): + if arg.name == "caller": + explicit_caller = idx + if arg.name in ("kwargs", "varargs"): + skip_special_params.add(arg.name) + args.append(frame.symbols.ref(arg.name)) + + undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) + + if "caller" in undeclared: + # In older Jinja versions there was a bug that allowed caller + # to retain the special behavior even if it was mentioned in + # the argument list. However thankfully this was only really + # working if it was the last argument. So we are explicitly + # checking this now and error out if it is anywhere else in + # the argument list. 
+ if explicit_caller is not None: + try: + node.defaults[explicit_caller - len(node.args)] + except IndexError: + self.fail( + "When defining macros or call blocks the " + 'special "caller" argument must be omitted ' + "or be given a default.", + node.lineno, + ) + else: + args.append(frame.symbols.declare_parameter("caller")) + macro_ref.accesses_caller = True + if "kwargs" in undeclared and "kwargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("kwargs")) + macro_ref.accesses_kwargs = True + if "varargs" in undeclared and "varargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("varargs")) + macro_ref.accesses_varargs = True + + # macros are delayed, they never require output checks + frame.require_output_check = False + frame.symbols.analyze_node(node) + self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) + self.indent() + + self.buffer(frame) + self.enter_frame(frame) + + self.push_parameter_definitions(frame) + for idx, arg in enumerate(node.args): + ref = frame.symbols.ref(arg.name) + self.writeline(f"if {ref} is missing:") + self.indent() + try: + default = node.defaults[idx - len(node.args)] + except IndexError: + self.writeline( + f'{ref} = undefined("parameter {arg.name!r} was not provided",' + f" name={arg.name!r})" + ) + else: + self.writeline(f"{ref} = ") + self.visit(default, frame) + self.mark_parameter_stored(ref) + self.outdent() + self.pop_parameter_definitions() + + self.blockvisit(node.body, frame) + self.return_buffer_contents(frame, force_unescaped=True) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + return frame, macro_ref + + def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: + """Dump the macro definition for the def created by macro_body.""" + arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) + name = getattr(macro_ref.node, "name", None) + if len(macro_ref.node.args) == 1: + arg_tuple += "," + self.write( + f"Macro(environment, macro, {name!r}, ({arg_tuple})," + f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," + f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" + ) + + def position(self, node: nodes.Node) -> str: + """Return a human readable position for the node.""" + rv = f"line {node.lineno}" + if self.name is not None: + rv = f"{rv} in {self.name!r}" + return rv + + def dump_local_context(self, frame: Frame) -> str: + items_kv = ", ".join( + f"{name!r}: {target}" + for name, target in frame.symbols.dump_stores().items() + ) + return f"{{{items_kv}}}" + + def write_commons(self) -> None: + """Writes a common preamble that is used by root and block functions. + Primarily this sets up common local helpers and enforces a generator + through a dead branch. + """ + self.writeline("resolve = context.resolve_or_missing") + self.writeline("undefined = environment.undefined") + self.writeline("concat = environment.concat") + # always use the standard Undefined class for the implicit else of + # conditional expressions + self.writeline("cond_expr_undefined = Undefined") + self.writeline("if 0: yield None") + + def push_parameter_definitions(self, frame: Frame) -> None: + """Pushes all parameter targets from the given frame into a local + stack that permits tracking of yet to be assigned parameters. In + particular this enables the optimization from `visit_Name` to skip + undefined expressions for parameters in macros as macros can reference + otherwise unbound parameters. 
+ """ + self._param_def_block.append(frame.symbols.dump_param_targets()) + + def pop_parameter_definitions(self) -> None: + """Pops the current parameter definitions set.""" + self._param_def_block.pop() + + def mark_parameter_stored(self, target: str) -> None: + """Marks a parameter in the current parameter definitions as stored. + This will skip the enforced undefined checks. + """ + if self._param_def_block: + self._param_def_block[-1].discard(target) + + def push_context_reference(self, target: str) -> None: + self._context_reference_stack.append(target) + + def pop_context_reference(self) -> None: + self._context_reference_stack.pop() + + def get_context_ref(self) -> str: + return self._context_reference_stack[-1] + + def get_resolve_func(self) -> str: + target = self._context_reference_stack[-1] + if target == "context": + return "resolve" + return f"{target}.resolve" + + def derive_context(self, frame: Frame) -> str: + return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" + + def parameter_is_undeclared(self, target: str) -> bool: + """Checks if a given target is an undeclared parameter.""" + if not self._param_def_block: + return False + return target in self._param_def_block[-1] + + def push_assign_tracking(self) -> None: + """Pushes a new layer for assignment tracking.""" + self._assign_stack.append(set()) + + def pop_assign_tracking(self, frame: Frame) -> None: + """Pops the topmost level for assignment tracking and updates the + context variables if necessary. + """ + vars = self._assign_stack.pop() + if ( + not frame.block_frame + and not frame.loop_frame + and not frame.toplevel + or not vars + ): + return + public_names = [x for x in vars if x[:1] != "_"] + if len(vars) == 1: + name = next(iter(vars)) + ref = frame.symbols.ref(name) + if frame.loop_frame: + self.writeline(f"_loop_vars[{name!r}] = {ref}") + return + if frame.block_frame: + self.writeline(f"_block_vars[{name!r}] = {ref}") + return + self.writeline(f"context.vars[{name!r}] = {ref}") + else: + if frame.loop_frame: + self.writeline("_loop_vars.update({") + elif frame.block_frame: + self.writeline("_block_vars.update({") + else: + self.writeline("context.vars.update({") + for idx, name in enumerate(vars): + if idx: + self.write(", ") + ref = frame.symbols.ref(name) + self.write(f"{name!r}: {ref}") + self.write("})") + if not frame.block_frame and not frame.loop_frame and public_names: + if len(public_names) == 1: + self.writeline(f"context.exported_vars.add({public_names[0]!r})") + else: + names_str = ", ".join(map(repr, public_names)) + self.writeline(f"context.exported_vars.update(({names_str}))") + + # -- Statement Visitors + + def visit_Template( + self, node: nodes.Template, frame: t.Optional[Frame] = None + ) -> None: + assert frame is None, "no root frame allowed" + eval_ctx = EvalContext(self.environment, self.name) + + from .runtime import exported, async_exported + + if self.environment.is_async: + exported_names = sorted(exported + async_exported) + else: + exported_names = sorted(exported) + + self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) + + # if we want a deferred initialization we cannot move the + # environment into a local name + envenv = "" if self.defer_init else ", environment=environment" + + # do we have an extends tag at all? If not, we can save some + # overhead by just not processing any inheritance code. 
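# A sketch of the effect of the assignment tracking above: top-level names that
# do not start with an underscore are copied into context.exported_vars, which
# is roughly what a template's module exposes (exact module contents may vary).
from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    "{% set site_title = 'Docs' %}"
    "{% set _internal = 42 %}"
    "{% macro hello(name) %}Hello {{ name }}!{% endmacro %}"
)
mod = tmpl.module
print(mod.site_title)       # Docs
print(mod.hello("World"))   # Hello World!
# `_internal` is stored in context.vars but, per the underscore check above,
# never added to exported_vars, so it is not among the exported module names.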
+ have_extends = node.find(nodes.Extends) is not None + + # find all blocks + for block in node.find_all(nodes.Block): + if block.name in self.blocks: + self.fail(f"block {block.name!r} defined twice", block.lineno) + self.blocks[block.name] = block + + # find all imports and import them + for import_ in node.find_all(nodes.ImportedName): + if import_.importname not in self.import_aliases: + imp = import_.importname + self.import_aliases[imp] = alias = self.temporary_identifier() + if "." in imp: + module, obj = imp.rsplit(".", 1) + self.writeline(f"from {module} import {obj} as {alias}") + else: + self.writeline(f"import {imp} as {alias}") + + # add the load name + self.writeline(f"name = {self.name!r}") + + # generate the root render function. + self.writeline( + f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 + ) + self.indent() + self.write_commons() + + # process the root + frame = Frame(eval_ctx) + if "self" in find_undeclared(node.body, ("self",)): + ref = frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + frame.symbols.analyze_node(node) + frame.toplevel = frame.rootlevel = True + frame.require_output_check = have_extends and not self.has_known_extends + if have_extends: + self.writeline("parent_template = None") + self.enter_frame(frame) + self.pull_dependencies(node.body) + self.blockvisit(node.body, frame) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + # make sure that the parent root is called. + if have_extends: + if not self.has_known_extends: + self.indent() + self.writeline("if parent_template is not None:") + self.indent() + if not self.environment.is_async: + self.writeline("yield from parent_template.root_render_func(context)") + else: + self.writeline( + "async for event in parent_template.root_render_func(context):" + ) + self.indent() + self.writeline("yield event") + self.outdent() + self.outdent(1 + (not self.has_known_extends)) + + # at this point we now have the blocks collected and can visit them too. + for name, block in self.blocks.items(): + self.writeline( + f"{self.func('block_' + name)}(context, missing=missing{envenv}):", + block, + 1, + ) + self.indent() + self.write_commons() + # It's important that we do not make this frame a child of the + # toplevel template. This would cause a variety of + # interesting issues with identifier tracking. 
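# To see the module that visit_Template() assembles (the runtime imports,
# `name = ...`, a root() generator, one block_<name>() function per block and a
# `blocks = {...}` mapping), the generated source can be inspected with
# Environment.compile(..., raw=True); the exact output differs between versions.
from jinja2 import Environment

print(Environment().compile(
    "Hello {{ name }}!{% block footer %}bye{% endblock %}", raw=True
))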
+ block_frame = Frame(eval_ctx) + block_frame.block_frame = True + undeclared = find_undeclared(block.body, ("self", "super")) + if "self" in undeclared: + ref = block_frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + if "super" in undeclared: + ref = block_frame.symbols.declare_parameter("super") + self.writeline(f"{ref} = context.super({name!r}, block_{name})") + block_frame.symbols.analyze_node(block) + block_frame.block = name + self.writeline("_block_vars = {}") + self.enter_frame(block_frame) + self.pull_dependencies(block.body) + self.blockvisit(block.body, block_frame) + self.leave_frame(block_frame, with_python_scope=True) + self.outdent() + + blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) + self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) + debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) + self.writeline(f"debug_info = {debug_kv_str!r}") + + def visit_Block(self, node: nodes.Block, frame: Frame) -> None: + """Call a block and register it for the template.""" + level = 0 + if frame.toplevel: + # if we know that we are a child template, there is no need to + # check if we are one + if self.has_known_extends: + return + if self.extends_so_far > 0: + self.writeline("if parent_template is None:") + self.indent() + level += 1 + + if node.scoped: + context = self.derive_context(frame) + else: + context = self.get_context_ref() + + if node.required: + self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) + self.indent() + self.writeline( + f'raise TemplateRuntimeError("Required block {node.name!r} not found")', + node, + ) + self.outdent() + + if not self.environment.is_async and frame.buffer is None: + self.writeline( + f"yield from context.blocks[{node.name!r}][0]({context})", node + ) + else: + self.writeline( + f"{self.choose_async()}for event in" + f" context.blocks[{node.name!r}][0]({context}):", + node, + ) + self.indent() + self.simple_write("event", frame) + self.outdent() + + self.outdent(level) + + def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: + """Calls the extender.""" + if not frame.toplevel: + self.fail("cannot use extend from a non top-level scope", node.lineno) + + # if the number of extends statements in general is zero so + # far, we don't have to add a check if something extended + # the template before this one. + if self.extends_so_far > 0: + # if we have a known extends we just add a template runtime + # error into the generated code. We could catch that at compile + # time too, but i welcome it not to confuse users by throwing the + # same error at different times just "because we can". + if not self.has_known_extends: + self.writeline("if parent_template is not None:") + self.indent() + self.writeline('raise TemplateRuntimeError("extended multiple times")') + + # if we have a known extends already we don't need that code here + # as we know that the template execution will end here. 
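# A template-level sketch of the block machinery above: blocks resolve through
# context.blocks, `super` walks to the parent's block, and a `required` block
# raises at runtime unless some child overrides it. (Template names and
# contents are illustrative; the DictLoader keeps the example self-contained.)
from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({
    "base.html": "{% block title %}Base{% endblock %} | {% block body required %}{% endblock %}",
    "child.html": (
        "{% extends 'base.html' %}"
        "{% block title %}{{ super() }} > Child{% endblock %}"
        "{% block body %}Hello{% endblock %}"
    ),
}))
print(env.get_template("child.html").render())  # Base > Child | Hello
# Rendering base.html on its own should raise TemplateRuntimeError, because the
# required `body` block is never overridden there.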
+ if self.has_known_extends: + raise CompilerExit() + else: + self.outdent() + + self.writeline("parent_template = environment.get_template(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + self.writeline("for name, parent_block in parent_template.blocks.items():") + self.indent() + self.writeline("context.blocks.setdefault(name, []).append(parent_block)") + self.outdent() + + # if this extends statement was in the root level we can take + # advantage of that information and simplify the generated code + # in the top level from this point onwards + if frame.rootlevel: + self.has_known_extends = True + + # and now we have one more + self.extends_so_far += 1 + + def visit_Include(self, node: nodes.Include, frame: Frame) -> None: + """Handles includes.""" + if node.ignore_missing: + self.writeline("try:") + self.indent() + + func_name = "get_or_select_template" + if isinstance(node.template, nodes.Const): + if isinstance(node.template.value, str): + func_name = "get_template" + elif isinstance(node.template.value, (tuple, list)): + func_name = "select_template" + elif isinstance(node.template, (nodes.Tuple, nodes.List)): + func_name = "select_template" + + self.writeline(f"template = environment.{func_name}(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + if node.ignore_missing: + self.outdent() + self.writeline("except TemplateNotFound:") + self.indent() + self.writeline("pass") + self.outdent() + self.writeline("else:") + self.indent() + + skip_event_yield = False + if node.with_context: + self.writeline( + f"{self.choose_async()}for event in template.root_render_func(" + "template.new_context(context.get_all(), True," + f" {self.dump_local_context(frame)})):" + ) + elif self.environment.is_async: + self.writeline( + "for event in (await template._get_default_module_async())" + "._body_stream:" + ) + else: + self.writeline("yield from template._get_default_module()._body_stream") + skip_event_yield = True + + if not skip_event_yield: + self.indent() + self.simple_write("event", frame) + self.outdent() + + if node.ignore_missing: + self.outdent() + + def _import_common( + self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame + ) -> None: + self.write(f"{self.choose_async('await ')}environment.get_template(") + self.visit(node.template, frame) + self.write(f", {self.name!r}).") + + if node.with_context: + f_name = f"make_module{self.choose_async('_async')}" + self.write( + f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" + ) + else: + self.write(f"_get_default_module{self.choose_async('_async')}(context)") + + def visit_Import(self, node: nodes.Import, frame: Frame) -> None: + """Visit regular imports.""" + self.writeline(f"{frame.symbols.ref(node.target)} = ", node) + if frame.toplevel: + self.write(f"context.vars[{node.target!r}] = ") + + self._import_common(node, frame) + + if frame.toplevel and not node.target.startswith("_"): + self.writeline(f"context.exported_vars.discard({node.target!r})") + + def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: + """Visit named imports.""" + self.newline(node) + self.write("included_template = ") + self._import_common(node, frame) + var_names = [] + discarded_names = [] + for name in node.names: + if isinstance(name, tuple): + name, alias = name + else: + alias = name + self.writeline( + f"{frame.symbols.ref(alias)} =" + f" getattr(included_template, {name!r}, missing)" + ) + self.writeline(f"if {frame.symbols.ref(alias)} is missing:") 
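# visit_Include() above picks get_template / select_template / get_or_select_template
# from the node type, and `ignore missing` wraps the lookup in a
# try/except TemplateNotFound. A small sketch (the template names are
# illustrative; header.html is deliberately not defined):
from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({
    "page.html": "{% include 'header.html' ignore missing %}body",
}))
print(env.get_template("page.html").render())  # body  (the missing include is skipped)
# A list such as {% include ['special.html', 'default.html'] %} would compile to
# environment.select_template([...]) and use the first template that exists.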
+ self.indent() + message = ( + "the template {included_template.__name__!r}" + f" (imported on {self.position(node)})" + f" does not export the requested name {name!r}" + ) + self.writeline( + f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" + ) + self.outdent() + if frame.toplevel: + var_names.append(alias) + if not alias.startswith("_"): + discarded_names.append(alias) + + if var_names: + if len(var_names) == 1: + name = var_names[0] + self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") + else: + names_kv = ", ".join( + f"{name!r}: {frame.symbols.ref(name)}" for name in var_names + ) + self.writeline(f"context.vars.update({{{names_kv}}})") + if discarded_names: + if len(discarded_names) == 1: + self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") + else: + names_str = ", ".join(map(repr, discarded_names)) + self.writeline( + f"context.exported_vars.difference_update(({names_str}))" + ) + + def visit_For(self, node: nodes.For, frame: Frame) -> None: + loop_frame = frame.inner() + loop_frame.loop_frame = True + test_frame = frame.inner() + else_frame = frame.inner() + + # try to figure out if we have an extended loop. An extended loop + # is necessary if the loop is in recursive mode if the special loop + # variable is accessed in the body if the body is a scoped block. + extended_loop = ( + node.recursive + or "loop" + in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) + or any(block.scoped for block in node.find_all(nodes.Block)) + ) + + loop_ref = None + if extended_loop: + loop_ref = loop_frame.symbols.declare_parameter("loop") + + loop_frame.symbols.analyze_node(node, for_branch="body") + if node.else_: + else_frame.symbols.analyze_node(node, for_branch="else") + + if node.test: + loop_filter_func = self.temporary_identifier() + test_frame.symbols.analyze_node(node, for_branch="test") + self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) + self.indent() + self.enter_frame(test_frame) + self.writeline(self.choose_async("async for ", "for ")) + self.visit(node.target, loop_frame) + self.write(" in ") + self.write(self.choose_async("auto_aiter(fiter)", "fiter")) + self.write(":") + self.indent() + self.writeline("if ", node.test) + self.visit(node.test, test_frame) + self.write(":") + self.indent() + self.writeline("yield ") + self.visit(node.target, loop_frame) + self.outdent(3) + self.leave_frame(test_frame, with_python_scope=True) + + # if we don't have an recursive loop we have to find the shadowed + # variables at that point. Because loops can be nested but the loop + # variable is a special one we have to enforce aliasing for it. 
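# A template-level sketch of the loop handling in visit_For(): the `loop`
# variable is only wired in when the body uses it, `{% for ... if ... %}`
# becomes the separate filter generator above, and `{% else %}` runs when the
# (filtered) iterable produced no items. (The data below is illustrative.)
from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    "{% for user in users if user.active %}"
    "{{ loop.index }}:{{ user.name }} "
    "{% else %}no active users"
    "{% endfor %}"
)
print(tmpl.render(users=[{"active": True, "name": "ada"},
                         {"active": False, "name": "bob"}]))  # 1:ada
print(tmpl.render(users=[]))                                  # no active users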
+ if node.recursive: + self.writeline( + f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node + ) + self.indent() + self.buffer(loop_frame) + + # Use the same buffer for the else frame + else_frame.buffer = loop_frame.buffer + + # make sure the loop variable is a special one and raise a template + # assertion error if a loop tries to write to loop + if extended_loop: + self.writeline(f"{loop_ref} = missing") + + for name in node.find_all(nodes.Name): + if name.ctx == "store" and name.name == "loop": + self.fail( + "Can't assign to special loop variable in for-loop target", + name.lineno, + ) + + if node.else_: + iteration_indicator = self.temporary_identifier() + self.writeline(f"{iteration_indicator} = 1") + + self.writeline(self.choose_async("async for ", "for "), node) + self.visit(node.target, loop_frame) + if extended_loop: + self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") + else: + self.write(" in ") + + if node.test: + self.write(f"{loop_filter_func}(") + if node.recursive: + self.write("reciter") + else: + if self.environment.is_async and not extended_loop: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async and not extended_loop: + self.write(")") + if node.test: + self.write(")") + + if node.recursive: + self.write(", undefined, loop_render_func, depth):") + else: + self.write(", undefined):" if extended_loop else ":") + + self.indent() + self.enter_frame(loop_frame) + + self.writeline("_loop_vars = {}") + self.blockvisit(node.body, loop_frame) + if node.else_: + self.writeline(f"{iteration_indicator} = 0") + self.outdent() + self.leave_frame( + loop_frame, with_python_scope=node.recursive and not node.else_ + ) + + if node.else_: + self.writeline(f"if {iteration_indicator}:") + self.indent() + self.enter_frame(else_frame) + self.blockvisit(node.else_, else_frame) + self.leave_frame(else_frame) + self.outdent() + + # if the node was recursive we have to return the buffer contents + # and start the iteration code + if node.recursive: + self.return_buffer_contents(loop_frame) + self.outdent() + self.start_write(frame, node) + self.write(f"{self.choose_async('await ')}loop(") + if self.environment.is_async: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async: + self.write(")") + self.write(", loop)") + self.end_write(frame) + + # at the end of the iteration, clear any assignments made in the + # loop from the top level + if self._assign_stack: + self._assign_stack[-1].difference_update(loop_frame.symbols.stores) + + def visit_If(self, node: nodes.If, frame: Frame) -> None: + if_frame = frame.soft() + self.writeline("if ", node) + self.visit(node.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(node.body, if_frame) + self.outdent() + for elif_ in node.elif_: + self.writeline("elif ", elif_) + self.visit(elif_.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(elif_.body, if_frame) + self.outdent() + if node.else_: + self.writeline("else:") + self.indent() + self.blockvisit(node.else_, if_frame) + self.outdent() + + def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: + macro_frame, macro_ref = self.macro_body(node, frame) + self.newline() + if frame.toplevel: + if not node.name.startswith("_"): + self.write(f"context.exported_vars.add({node.name!r})") + self.writeline(f"context.vars[{node.name!r}] = ") + self.write(f"{frame.symbols.ref(node.name)} = ") + self.macro_def(macro_ref, macro_frame) + + def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: + call_frame, macro_ref = self.macro_body(node, frame) + self.writeline("caller = ") + self.macro_def(macro_ref, call_frame) + self.start_write(frame, node) + self.visit_Call(node.call, frame, forward_caller=True) + self.end_write(frame) + + def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: + filter_frame = frame.inner() + filter_frame.symbols.analyze_node(node) + self.enter_frame(filter_frame) + self.buffer(filter_frame) + self.blockvisit(node.body, filter_frame) + self.start_write(frame, node) + self.visit_Filter(node.filter, filter_frame) + self.end_write(frame) + self.leave_frame(filter_frame) + + def visit_With(self, node: nodes.With, frame: Frame) -> None: + with_frame = frame.inner() + with_frame.symbols.analyze_node(node) + self.enter_frame(with_frame) + for target, expr in zip(node.targets, node.values): + self.newline() + self.visit(target, with_frame) + self.write(" = ") + self.visit(expr, frame) + self.blockvisit(node.body, with_frame) + self.leave_frame(with_frame) + + def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: + self.newline(node) + self.visit(node.node, frame) + + class _FinalizeInfo(t.NamedTuple): + const: t.Optional[t.Callable[..., str]] + src: t.Optional[str] + + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + """The default finalize function if the environment isn't + configured with one. Or, if the environment has one, this is + called on that function's output for constants. + """ + return str(value) + + _finalize: t.Optional[_FinalizeInfo] = None + + def _make_finalize(self) -> _FinalizeInfo: + """Build the finalize function to be used on constants and at + runtime. Cached so it's only created once for all output nodes. + + Returns a ``namedtuple`` with the following attributes: + + ``const`` + A function to finalize constant data at compile time. + + ``src`` + Source code to output around nodes to be evaluated at + runtime. + """ + if self._finalize is not None: + return self._finalize + + finalize: t.Optional[t.Callable[..., t.Any]] + finalize = default = self._default_finalize + src = None + + if self.environment.finalize: + src = "environment.finalize(" + env_finalize = self.environment.finalize + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(env_finalize) # type: ignore + ) + finalize = None + + if pass_arg is None: + + def finalize(value: t.Any) -> t.Any: # noqa: F811 + return default(env_finalize(value)) + + else: + src = f"{src}{pass_arg}, " + + if pass_arg == "environment": + + def finalize(value: t.Any) -> t.Any: # noqa: F811 + return default(env_finalize(self.environment, value)) + + self._finalize = self._FinalizeInfo(finalize, src) + return self._finalize + + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: + """Given a group of constant values converted from ``Output`` + child nodes, produce a string to write to the template module + source. + """ + return repr(concat(group)) + + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> str: + """Try to optimize a child of an ``Output`` node by trying to + convert it to constant, finalized data at compile time. + + If :exc:`Impossible` is raised, the node is not constant and + will be evaluated at runtime. Any other exception will also be + evaluated at runtime for easier debugging. 
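# A sketch of the `finalize` hook that _make_finalize() above wires around every
# output expression (and folds into constants where possible); a common use is
# rendering None as an empty string instead of the literal "None".
from jinja2 import Environment

def hide_none(value):
    # called for each {{ ... }} result before it is written to the output
    return "" if value is None else value

env = Environment(finalize=hide_none)
print(env.from_string("value: {{ value }}").render(value=None))  # "value: "
print(env.from_string("value: {{ value }}").render(value=3))     # "value: 3"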
+ """ + const = node.as_const(frame.eval_ctx) + + if frame.eval_ctx.autoescape: + const = escape(const) + + # Template data doesn't go through finalize. + if isinstance(node, nodes.TemplateData): + return str(const) + + return finalize.const(const) # type: ignore + + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code before visiting a child of an + ``Output`` node. + """ + if frame.eval_ctx.volatile: + self.write("(escape if context.eval_ctx.autoescape else str)(") + elif frame.eval_ctx.autoescape: + self.write("escape(") + else: + self.write("str(") + + if finalize.src is not None: + self.write(finalize.src) + + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code after visiting a child of an + ``Output`` node. + """ + self.write(")") + + if finalize.src is not None: + self.write(")") + + def visit_Output(self, node: nodes.Output, frame: Frame) -> None: + # If an extends is active, don't render outside a block. + if frame.require_output_check: + # A top-level extends is known to exist at compile time. + if self.has_known_extends: + return + + self.writeline("if parent_template is None:") + self.indent() + + finalize = self._make_finalize() + body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] + + # Evaluate constants at compile time if possible. Each item in + # body will be either a list of static data or a node to be + # evaluated at runtime. + for child in node.nodes: + try: + if not ( + # If the finalize function requires runtime context, + # constants can't be evaluated at compile time. + finalize.const + # Unless it's basic template data that won't be + # finalized anyway. + or isinstance(child, nodes.TemplateData) + ): + raise nodes.Impossible() + + const = self._output_child_to_const(child, frame, finalize) + except (nodes.Impossible, Exception): + # The node was not constant and needs to be evaluated at + # runtime. Or another error was raised, which is easier + # to debug at runtime. + body.append(child) + continue + + if body and isinstance(body[-1], list): + body[-1].append(const) + else: + body.append([const]) + + if frame.buffer is not None: + if len(body) == 1: + self.writeline(f"{frame.buffer}.append(") + else: + self.writeline(f"{frame.buffer}.extend((") + + self.indent() + + for item in body: + if isinstance(item, list): + # A group of constant data to join and output. + val = self._output_const_repr(item) + + if frame.buffer is None: + self.writeline("yield " + val) + else: + self.writeline(val + ",") + else: + if frame.buffer is None: + self.writeline("yield ", item) + else: + self.newline(item) + + # A node to be evaluated at runtime. + self._output_child_pre(item, frame, finalize) + self.visit(item, frame) + self._output_child_post(item, frame, finalize) + + if frame.buffer is not None: + self.write(",") + + if frame.buffer is not None: + self.outdent() + self.writeline(")" if len(body) == 1 else "))") + + if frame.require_output_check: + self.outdent() + + def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: + self.push_assign_tracking() + self.newline(node) + self.visit(node.target, frame) + self.write(" = ") + self.visit(node.node, frame) + self.pop_assign_tracking(frame) + + def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: + self.push_assign_tracking() + block_frame = frame.inner() + # This is a special case. 
Since a set block always captures we + # will disable output checks. This way one can use set blocks + # toplevel even in extended templates. + block_frame.require_output_check = False + block_frame.symbols.analyze_node(node) + self.enter_frame(block_frame) + self.buffer(block_frame) + self.blockvisit(node.body, block_frame) + self.newline(node) + self.visit(node.target, frame) + self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") + if node.filter is not None: + self.visit_Filter(node.filter, block_frame) + else: + self.write(f"concat({block_frame.buffer})") + self.write(")") + self.pop_assign_tracking(frame) + self.leave_frame(block_frame) + + # -- Expression Visitors + + def visit_Name(self, node: nodes.Name, frame: Frame) -> None: + if node.ctx == "store" and ( + frame.toplevel or frame.loop_frame or frame.block_frame + ): + if self._assign_stack: + self._assign_stack[-1].add(node.name) + ref = frame.symbols.ref(node.name) + + # If we are looking up a variable we might have to deal with the + # case where it's undefined. We can skip that case if the load + # instruction indicates a parameter which are always defined. + if node.ctx == "load": + load = frame.symbols.find_load(ref) + if not ( + load is not None + and load[0] == VAR_LOAD_PARAMETER + and not self.parameter_is_undeclared(ref) + ): + self.write( + f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" + ) + return + + self.write(ref) + + def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: + # NSRefs can only be used to store values; since they use the normal + # `foo.bar` notation they will be parsed as a normal attribute access + # when used anywhere but in a `set` context + ref = frame.symbols.ref(node.name) + self.writeline(f"if not isinstance({ref}, Namespace):") + self.indent() + self.writeline( + "raise TemplateRuntimeError" + '("cannot assign attribute on non-namespace object")' + ) + self.outdent() + self.writeline(f"{ref}[{node.attr!r}]") + + def visit_Const(self, node: nodes.Const, frame: Frame) -> None: + val = node.as_const(frame.eval_ctx) + if isinstance(val, float): + self.write(str(val)) + else: + self.write(repr(val)) + + def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: + try: + self.write(repr(node.as_const(frame.eval_ctx))) + except nodes.Impossible: + self.write( + f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" + ) + + def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: + self.write("(") + idx = -1 + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write(",)" if idx == 0 else ")") + + def visit_List(self, node: nodes.List, frame: Frame) -> None: + self.write("[") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write("]") + + def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: + self.write("{") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item.key, frame) + self.write(": ") + self.visit(item.value, frame) + self.write("}") + + visit_Add = _make_binop("+") + visit_Sub = _make_binop("-") + visit_Mul = _make_binop("*") + visit_Div = _make_binop("/") + visit_FloorDiv = _make_binop("//") + visit_Pow = _make_binop("**") + visit_Mod = _make_binop("%") + visit_And = _make_binop("and") + visit_Or = _make_binop("or") + visit_Pos = _make_unop("+") + visit_Neg = _make_unop("-") + visit_Not = _make_unop("not ") + + @optimizeconst + def visit_Concat(self, node: 
nodes.Concat, frame: Frame) -> None: + if frame.eval_ctx.volatile: + func_name = "(markup_join if context.eval_ctx.volatile else str_join)" + elif frame.eval_ctx.autoescape: + func_name = "markup_join" + else: + func_name = "str_join" + self.write(f"{func_name}((") + for arg in node.nodes: + self.visit(arg, frame) + self.write(", ") + self.write("))") + + @optimizeconst + def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: + self.write("(") + self.visit(node.expr, frame) + for op in node.ops: + self.visit(op, frame) + self.write(")") + + def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: + self.write(f" {operators[node.op]} ") + self.visit(node.expr, frame) + + @optimizeconst + def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getattr(") + self.visit(node.node, frame) + self.write(f", {node.attr!r})") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: + # slices bypass the environment getitem method. + if isinstance(node.arg, nodes.Slice): + self.visit(node.node, frame) + self.write("[") + self.visit(node.arg, frame) + self.write("]") + else: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getitem(") + self.visit(node.node, frame) + self.write(", ") + self.visit(node.arg, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: + if node.start is not None: + self.visit(node.start, frame) + self.write(":") + if node.stop is not None: + self.visit(node.stop, frame) + if node.step is not None: + self.write(":") + self.visit(node.step, frame) + + @contextmanager + def _filter_test_common( + self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool + ) -> t.Iterator[None]: + if self.environment.is_async: + self.write("(await auto_await(") + + if is_filter: + self.write(f"{self.filters[node.name]}(") + func = self.environment.filters.get(node.name) + else: + self.write(f"{self.tests[node.name]}(") + func = self.environment.tests.get(node.name) + + # When inside an If or CondExpr frame, allow the filter to be + # undefined at compile time and only raise an error if it's + # actually called at runtime. See pull_dependencies. + if func is None and not frame.soft_frame: + type_name = "filter" if is_filter else "test" + self.fail(f"No {type_name} named {node.name!r}.", node.lineno) + + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(func) # type: ignore + ) + + if pass_arg is not None: + self.write(f"{pass_arg}, ") + + # Back to the visitor function to handle visiting the target of + # the filter or test. 
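# A sketch of how the filter/test plumbing in _filter_test_common() behaves at
# the template level. Callables marked with @pass_environment (or @pass_context /
# @pass_eval_context) get that object prepended to the call, and a filter that
# only appears inside an if/conditional branch is not checked at compile time;
# it fails at runtime only if that branch executes (see pull_dependencies).
# The names `shout` and `not_registered` are made up for illustration.
from jinja2 import Environment, pass_environment

@pass_environment
def shout(environment, value, marks=1):
    return f"{value}{'!' * marks}"

env = Environment()
env.filters["shout"] = shout
print(env.from_string("{{ 'hi' | shout(3) }}").render())  # hi!!!

tmpl = env.from_string("{% if use_it %}{{ 'x' | not_registered }}{% endif %}")
print(tmpl.render(use_it=False))  # ""  -- compiles and renders fine; the unknown
                                  # filter should raise TemplateRuntimeError only
                                  # when use_it is true.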
+ yield + + self.signature(node, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: + with self._filter_test_common(node, frame, True): + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write( + f"(Markup(concat({frame.buffer}))" + f" if context.eval_ctx.autoescape else concat({frame.buffer}))" + ) + elif frame.eval_ctx.autoescape: + self.write(f"Markup(concat({frame.buffer}))") + else: + self.write(f"concat({frame.buffer})") + + @optimizeconst + def visit_Test(self, node: nodes.Test, frame: Frame) -> None: + with self._filter_test_common(node, frame, False): + self.visit(node.node, frame) + + @optimizeconst + def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: + frame = frame.soft() + + def write_expr2() -> None: + if node.expr2 is not None: + self.visit(node.expr2, frame) + return + + self.write( + f'cond_expr_undefined("the inline if-expression on' + f" {self.position(node)} evaluated to false and no else" + f' section was defined.")' + ) + + self.write("(") + self.visit(node.expr1, frame) + self.write(" if ") + self.visit(node.test, frame) + self.write(" else ") + write_expr2() + self.write(")") + + @optimizeconst + def visit_Call( + self, node: nodes.Call, frame: Frame, forward_caller: bool = False + ) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + if self.environment.sandboxed: + self.write("environment.call(context, ") + else: + self.write("context.call(") + self.visit(node.node, frame) + extra_kwargs = {"caller": "caller"} if forward_caller else None + loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} + block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} + if extra_kwargs: + extra_kwargs.update(loop_kwargs, **block_kwargs) + elif loop_kwargs or block_kwargs: + extra_kwargs = dict(loop_kwargs, **block_kwargs) + self.signature(node, frame, extra_kwargs) + self.write(")") + if self.environment.is_async: + self.write("))") + + def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: + self.write(node.key + "=") + self.visit(node.value, frame) + + # -- Unused nodes for extensions + + def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: + self.write("Markup(") + self.visit(node.expr, frame) + self.write(")") + + def visit_MarkSafeIfAutoescape( + self, node: nodes.MarkSafeIfAutoescape, frame: Frame + ) -> None: + self.write("(Markup if context.eval_ctx.autoescape else identity)(") + self.visit(node.expr, frame) + self.write(")") + + def visit_EnvironmentAttribute( + self, node: nodes.EnvironmentAttribute, frame: Frame + ) -> None: + self.write("environment." 
+ node.name) + + def visit_ExtensionAttribute( + self, node: nodes.ExtensionAttribute, frame: Frame + ) -> None: + self.write(f"environment.extensions[{node.identifier!r}].{node.name}") + + def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: + self.write(self.import_aliases[node.importname]) + + def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: + self.write(node.name) + + def visit_ContextReference( + self, node: nodes.ContextReference, frame: Frame + ) -> None: + self.write("context") + + def visit_DerivedContextReference( + self, node: nodes.DerivedContextReference, frame: Frame + ) -> None: + self.write(self.derive_context(frame)) + + def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: + self.writeline("continue", node) + + def visit_Break(self, node: nodes.Break, frame: Frame) -> None: + self.writeline("break", node) + + def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: + scope_frame = frame.inner() + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + + def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: + ctx = self.temporary_identifier() + self.writeline(f"{ctx} = {self.derive_context(frame)}") + self.writeline(f"{ctx}.vars = ") + self.visit(node.context, frame) + self.push_context_reference(ctx) + + scope_frame = frame.inner(isolated=True) + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + self.pop_context_reference() + + def visit_EvalContextModifier( + self, node: nodes.EvalContextModifier, frame: Frame + ) -> None: + for keyword in node.options: + self.writeline(f"context.eval_ctx.{keyword.key} = ") + self.visit(keyword.value, frame) + try: + val = keyword.value.as_const(frame.eval_ctx) + except nodes.Impossible: + frame.eval_ctx.volatile = True + else: + setattr(frame.eval_ctx, keyword.key, val) + + def visit_ScopedEvalContextModifier( + self, node: nodes.ScopedEvalContextModifier, frame: Frame + ) -> None: + old_ctx_name = self.temporary_identifier() + saved_ctx = frame.eval_ctx.save() + self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") + self.visit_EvalContextModifier(node, frame) + for child in node.body: + self.visit(child, frame) + frame.eval_ctx.revert(saved_ctx) + self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/lib/python3.12/site-packages/jinja2/constants.py b/lib/python3.12/site-packages/jinja2/constants.py new file mode 100644 index 0000000..41a1c23 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/constants.py @@ -0,0 +1,20 @@ +#: list of lorem ipsum words used by the lipsum() helper function +LOREM_IPSUM_WORDS = """\ +a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at +auctor augue bibendum blandit class commodo condimentum congue consectetuer +consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus +diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend +elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames +faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac +hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum +justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem +luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie +mollis montes morbi 
mus nam nascetur natoque nec neque netus nibh nisi nisl non +nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque +penatibus per pharetra phasellus placerat platea porta porttitor posuere +potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus +ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit +sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor +tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices +ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus +viverra volutpat vulputate""" diff --git a/lib/python3.12/site-packages/jinja2/debug.py b/lib/python3.12/site-packages/jinja2/debug.py new file mode 100644 index 0000000..7ed7e92 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/debug.py @@ -0,0 +1,191 @@ +import sys +import typing as t +from types import CodeType +from types import TracebackType + +from .exceptions import TemplateSyntaxError +from .utils import internal_code +from .utils import missing + +if t.TYPE_CHECKING: + from .runtime import Context + + +def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: + """Rewrite the current exception to replace any tracebacks from + within compiled template code with tracebacks that look like they + came from the template source. + + This must be called within an ``except`` block. + + :param source: For ``TemplateSyntaxError``, the original source if + known. + :return: The original exception with the rewritten traceback. + """ + _, exc_value, tb = sys.exc_info() + exc_value = t.cast(BaseException, exc_value) + tb = t.cast(TracebackType, tb) + + if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: + exc_value.translated = True + exc_value.source = source + # Remove the old traceback, otherwise the frames from the + # compiler still show up. + exc_value.with_traceback(None) + # Outside of runtime, so the frame isn't executing template + # code, but it still needs to point at the template. + tb = fake_traceback( + exc_value, None, exc_value.filename or "", exc_value.lineno + ) + else: + # Skip the frame for the render function. + tb = tb.tb_next + + stack = [] + + # Build the stack of traceback object, replacing any in template + # code with the source file and line information. + while tb is not None: + # Skip frames decorated with @internalcode. These are internal + # calls that aren't useful in template debugging output. + if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + template = tb.tb_frame.f_globals.get("__jinja_template__") + + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) + stack.append(fake_tb) + else: + stack.append(tb) + + tb = tb.tb_next + + tb_next = None + + # Assign tb_next in reverse to avoid circular references. + for tb in reversed(stack): + tb.tb_next = tb_next + tb_next = tb + + return exc_value.with_traceback(tb_next) + + +def fake_traceback( # type: ignore + exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int +) -> TracebackType: + """Produce a new traceback object that looks like it came from the + template source instead of the compiled code. The filename, line + number, and location name will point to the template, and the local + variables will be the current template context. 
+ + :param exc_value: The original exception to be re-raised to create + the new traceback. + :param tb: The original traceback to get the local variables and + code info from. + :param filename: The template filename. + :param lineno: The line number in the template source. + """ + if tb is not None: + # Replace the real locals with the context that would be + # available at that point in the template. + locals = get_template_locals(tb.tb_frame.f_locals) + locals.pop("__jinja_exception__", None) + else: + locals = {} + + globals = { + "__name__": filename, + "__file__": filename, + "__jinja_exception__": exc_value, + } + # Raise an exception at the correct line number. + code: CodeType = compile( + "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" + ) + + # Build a new code object that points to the template file and + # replaces the location with a block name. + location = "template" + + if tb is not None: + function = tb.tb_frame.f_code.co_name + + if function == "root": + location = "top-level template code" + elif function.startswith("block_"): + location = f"block {function[6:]!r}" + + if sys.version_info >= (3, 8): + code = code.replace(co_name=location) + else: + code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + location, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars, + ) + + # Execute the new code, which is guaranteed to raise, and return + # the new traceback without this frame. + try: + exec(code, globals, locals) + except BaseException: + return sys.exc_info()[2].tb_next # type: ignore + + +def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: + """Based on the runtime locals, get the context that would be + available at that point in the template. + """ + # Start with the current template context. + ctx: "t.Optional[Context]" = real_locals.get("context") + + if ctx is not None: + data: t.Dict[str, t.Any] = ctx.get_all().copy() + else: + data = {} + + # Might be in a derived context that only sets local variables + # rather than pushing a context. Local variables follow the scheme + # l_depth_name. Find the highest-depth local that has a value for + # each name. + local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} + + for name, value in real_locals.items(): + if not name.startswith("l_") or value is missing: + # Not a template variable, or no longer relevant. + continue + + try: + _, depth_str, name = name.split("_", 2) + depth = int(depth_str) + except ValueError: + continue + + cur_depth = local_overrides.get(name, (-1,))[0] + + if cur_depth < depth: + local_overrides[name] = (depth, value) + + # Modify the context with any derived context. 
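# A small trace of get_template_locals(): compiled templates keep template
# variables in Python locals named `l_<depth>_<name>`, and the deepest binding
# wins when rebuilding the template namespace for debugging.
# (The frame locals below are made up.)
from jinja2.debug import get_template_locals

print(get_template_locals({
    "l_0_item": "outer",           # binding from an enclosing scope
    "l_1_item": "inner",           # deeper binding shadows it
    "l_0_total": 3,
    "unrelated_python_local": object(),
}))
# -> {'item': 'inner', 'total': 3}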
+ for name, (_, value) in local_overrides.items(): + if value is missing: + data.pop(name, None) + else: + data[name] = value + + return data diff --git a/lib/python3.12/site-packages/jinja2/defaults.py b/lib/python3.12/site-packages/jinja2/defaults.py new file mode 100644 index 0000000..638cad3 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/defaults.py @@ -0,0 +1,48 @@ +import typing as t + +from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 +from .tests import TESTS as DEFAULT_TESTS # noqa: F401 +from .utils import Cycler +from .utils import generate_lorem_ipsum +from .utils import Joiner +from .utils import Namespace + +if t.TYPE_CHECKING: + import typing_extensions as te + +# defaults for the parser / lexer +BLOCK_START_STRING = "{%" +BLOCK_END_STRING = "%}" +VARIABLE_START_STRING = "{{" +VARIABLE_END_STRING = "}}" +COMMENT_START_STRING = "{#" +COMMENT_END_STRING = "#}" +LINE_STATEMENT_PREFIX: t.Optional[str] = None +LINE_COMMENT_PREFIX: t.Optional[str] = None +TRIM_BLOCKS = False +LSTRIP_BLOCKS = False +NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" +KEEP_TRAILING_NEWLINE = False + +# default filters, tests and namespace + +DEFAULT_NAMESPACE = { + "range": range, + "dict": dict, + "lipsum": generate_lorem_ipsum, + "cycler": Cycler, + "joiner": Joiner, + "namespace": Namespace, +} + +# default policies +DEFAULT_POLICIES: t.Dict[str, t.Any] = { + "compiler.ascii_str": True, + "urlize.rel": "noopener", + "urlize.target": None, + "urlize.extra_schemes": None, + "truncate.leeway": 5, + "json.dumps_function": None, + "json.dumps_kwargs": {"sort_keys": True}, + "ext.i18n.trimmed": False, +} diff --git a/lib/python3.12/site-packages/jinja2/environment.py b/lib/python3.12/site-packages/jinja2/environment.py new file mode 100644 index 0000000..185d332 --- /dev/null +++ b/lib/python3.12/site-packages/jinja2/environment.py @@ -0,0 +1,1667 @@ +"""Classes for managing templates and their runtime and compile time +options. +""" +import os +import typing +import typing as t +import weakref +from collections import ChainMap +from functools import lru_cache +from functools import partial +from functools import reduce +from types import CodeType + +from markupsafe import Markup + +from . 
import nodes +from .compiler import CodeGenerator +from .compiler import generate +from .defaults import BLOCK_END_STRING +from .defaults import BLOCK_START_STRING +from .defaults import COMMENT_END_STRING +from .defaults import COMMENT_START_STRING +from .defaults import DEFAULT_FILTERS +from .defaults import DEFAULT_NAMESPACE +from .defaults import DEFAULT_POLICIES +from .defaults import DEFAULT_TESTS +from .defaults import KEEP_TRAILING_NEWLINE +from .defaults import LINE_COMMENT_PREFIX +from .defaults import LINE_STATEMENT_PREFIX +from .defaults import LSTRIP_BLOCKS +from .defaults import NEWLINE_SEQUENCE +from .defaults import TRIM_BLOCKS +from .defaults import VARIABLE_END_STRING +from .defaults import VARIABLE_START_STRING +from .exceptions import TemplateNotFound +from .exceptions import TemplateRuntimeError +from .exceptions import TemplatesNotFound +from .exceptions import TemplateSyntaxError +from .exceptions import UndefinedError +from .lexer import get_lexer +from .lexer import Lexer +from .lexer import TokenStream +from .nodes import EvalContext +from .parser import Parser +from .runtime import Context +from .runtime import new_context +from .runtime import Undefined +from .utils import _PassArg +from .utils import concat +from .utils import consume +from .utils import import_string +from .utils import internalcode +from .utils import LRUCache +from .utils import missing + +if t.TYPE_CHECKING: + import typing_extensions as te + from .bccache import BytecodeCache + from .ext import Extension + from .loaders import BaseLoader + +_env_bound = t.TypeVar("_env_bound", bound="Environment") + + +# for direct template usage we have up to ten living environments +@lru_cache(maxsize=10) +def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: + """Return a new spontaneous environment. A spontaneous environment + is used for templates created directly rather than through an + existing environment. + + :param cls: Environment class to create. + :param args: Positional arguments passed to environment. + """ + env = cls(*args) + env.shared = True + return env + + +def create_cache( + size: int, +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: + """Return the cache class for the given size.""" + if size == 0: + return None + + if size < 0: + return {} + + return LRUCache(size) # type: ignore + + +def copy_cache( + cache: t.Optional[t.MutableMapping], +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: + """Create an empty copy of the given cache.""" + if cache is None: + return None + + if type(cache) is dict: + return {} + + return LRUCache(cache.capacity) # type: ignore + + +def load_extensions( + environment: "Environment", + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], +) -> t.Dict[str, "Extension"]: + """Load the extensions from the list and bind it to the environment. + Returns a dict of instantiated extensions. + """ + result = {} + + for extension in extensions: + if isinstance(extension, str): + extension = t.cast(t.Type["Extension"], import_string(extension)) + + result[extension.identifier] = extension(environment) + + return result + + +def _environment_config_check(environment: "Environment") -> "Environment": + """Perform a sanity check on the environment.""" + assert issubclass( + environment.undefined, Undefined + ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
+ assert ( + environment.block_start_string + != environment.variable_start_string + != environment.comment_start_string + ), "block, variable and comment start strings must be different." + assert environment.newline_sequence in { + "\r", + "\r\n", + "\n", + }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." + return environment + + +class Environment: + r"""The core component of Jinja is the `Environment`. It contains + important shared variables like configuration, filters, tests, + globals and others. Instances of this class may be modified if + they are not shared and if no template was loaded so far. + Modifications on environments after the first template was loaded + will lead to surprising effects and undefined behavior. + + Here are the possible initialization parameters: + + `block_start_string` + The string marking the beginning of a block. Defaults to ``'{%'``. + + `block_end_string` + The string marking the end of a block. Defaults to ``'%}'``. + + `variable_start_string` + The string marking the beginning of a print statement. + Defaults to ``'{{'``. + + `variable_end_string` + The string marking the end of a print statement. Defaults to + ``'}}'``. + + `comment_start_string` + The string marking the beginning of a comment. Defaults to ``'{#'``. + + `comment_end_string` + The string marking the end of a comment. Defaults to ``'#}'``. + + `line_statement_prefix` + If given and a string, this will be used as prefix for line based + statements. See also :ref:`line-statements`. + + `line_comment_prefix` + If given and a string, this will be used as prefix for line based + comments. See also :ref:`line-statements`. + + .. versionadded:: 2.2 + + `trim_blocks` + If this is set to ``True`` the first newline after a block is + removed (block, not variable tag!). Defaults to `False`. + + `lstrip_blocks` + If this is set to ``True`` leading spaces and tabs are stripped + from the start of a line to a block. Defaults to `False`. + + `newline_sequence` + The sequence that starts a newline. Must be one of ``'\r'``, + ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a + useful default for Linux and OS X systems as well as web + applications. + + `keep_trailing_newline` + Preserve the trailing newline when rendering templates. + The default is ``False``, which causes a single newline, + if present, to be stripped from the end of the template. + + .. versionadded:: 2.7 + + `extensions` + List of Jinja extensions to use. This can either be import paths + as strings or extension classes. For more information have a + look at :ref:`the extensions documentation `. + + `optimized` + should the optimizer be enabled? Default is ``True``. + + `undefined` + :class:`Undefined` or a subclass of it that is used to represent + undefined values in the template. + + `finalize` + A callable that can be used to process the result of a variable + expression before it is output. For example one can convert + ``None`` implicitly into an empty string here. + + `autoescape` + If set to ``True`` the XML/HTML autoescaping feature is enabled by + default. For more details about autoescaping see + :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also + be a callable that is passed the template name and has to + return ``True`` or ``False`` depending on autoescape should be + enabled by default. + + .. versionchanged:: 2.4 + `autoescape` can now be a function + + `loader` + The template loader for this environment. + + `cache_size` + The size of the cache. 
Per default this is ``400`` which means + that if more than 400 templates are loaded the loader will clean + out the least recently used template. If the cache size is set to + ``0`` templates are recompiled all the time, if the cache size is + ``-1`` the cache will not be cleaned. + + .. versionchanged:: 2.8 + The cache size was increased to 400 from a low 50. + + `auto_reload` + Some loaders load templates from locations where the template + sources may change (ie: file system or database). If + ``auto_reload`` is set to ``True`` (default) every time a template is + requested the loader checks if the source changed and if yes, it + will reload the template. For higher performance it's possible to + disable that. + + `bytecode_cache` + If set to a bytecode cache object, this object will provide a + cache for the internal Jinja bytecode so that templates don't + have to be parsed if they were not changed. + + See :ref:`bytecode-cache` for more information. + + `enable_async` + If set to true this enables async template execution which + allows using async functions and generators. + """ + + #: if this environment is sandboxed. Modifying this variable won't make + #: the environment sandboxed though. For a real sandboxed environment + #: have a look at jinja2.sandbox. This flag alone controls the code + #: generation by the compiler. + sandboxed = False + + #: True if the environment is just an overlay + overlayed = False + + #: the environment this environment is linked to if it is an overlay + linked_to: t.Optional["Environment"] = None + + #: shared environments have this set to `True`. A shared environment + #: must not be modified + shared = False + + #: the class that is used for code generation. See + #: :class:`~jinja2.compiler.CodeGenerator` for more information. + code_generator_class: t.Type["CodeGenerator"] = CodeGenerator + + concat = "".join + + #: the context class that is used for templates. See + #: :class:`~jinja2.runtime.Context` for more information. + context_class: t.Type[Context] = Context + + template_class: t.Type["Template"] + + def __init__( + self, + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + loader: t.Optional["BaseLoader"] = None, + cache_size: int = 400, + auto_reload: bool = True, + bytecode_cache: t.Optional["BytecodeCache"] = None, + enable_async: bool = False, + ): + # !!Important notice!! + # The constructor accepts quite a few arguments that should be + # passed by keyword rather than position. 
However it's important to + # not change the order of arguments because it's used at least + # internally in those cases: + # - spontaneous environments (i18n extension and Template) + # - unittests + # If parameter changes are required only add parameters at the end + # and don't change the arguments (or the defaults!) of the arguments + # existing already. + + # lexer / parser information + self.block_start_string = block_start_string + self.block_end_string = block_end_string + self.variable_start_string = variable_start_string + self.variable_end_string = variable_end_string + self.comment_start_string = comment_start_string + self.comment_end_string = comment_end_string + self.line_statement_prefix = line_statement_prefix + self.line_comment_prefix = line_comment_prefix + self.trim_blocks = trim_blocks + self.lstrip_blocks = lstrip_blocks + self.newline_sequence = newline_sequence + self.keep_trailing_newline = keep_trailing_newline + + # runtime information + self.undefined: t.Type[Undefined] = undefined + self.optimized = optimized + self.finalize = finalize + self.autoescape = autoescape + + # defaults + self.filters = DEFAULT_FILTERS.copy() + self.tests = DEFAULT_TESTS.copy() + self.globals = DEFAULT_NAMESPACE.copy() + + # set the loader provided + self.loader = loader + self.cache = create_cache(cache_size) + self.bytecode_cache = bytecode_cache + self.auto_reload = auto_reload + + # configurable policies + self.policies = DEFAULT_POLICIES.copy() + + # load extensions + self.extensions = load_extensions(self, extensions) + + self.is_async = enable_async + _environment_config_check(self) + + def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: + """Adds an extension after the environment was created. + + .. versionadded:: 2.5 + """ + self.extensions.update(load_extensions(self, [extension])) + + def extend(self, **attributes: t.Any) -> None: + """Add the items to the instance of the environment if they do not exist + yet. This is used by :ref:`extensions ` to register + callbacks and configuration values without breaking inheritance. + """ + for key, value in attributes.items(): + if not hasattr(self, key): + setattr(self, key, value) + + def overlay( + self, + block_start_string: str = missing, + block_end_string: str = missing, + variable_start_string: str = missing, + variable_end_string: str = missing, + comment_start_string: str = missing, + comment_end_string: str = missing, + line_statement_prefix: t.Optional[str] = missing, + line_comment_prefix: t.Optional[str] = missing, + trim_blocks: bool = missing, + lstrip_blocks: bool = missing, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, + keep_trailing_newline: bool = missing, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, + optimized: bool = missing, + undefined: t.Type[Undefined] = missing, + finalize: t.Optional[t.Callable[..., t.Any]] = missing, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, + loader: t.Optional["BaseLoader"] = missing, + cache_size: int = missing, + auto_reload: bool = missing, + bytecode_cache: t.Optional["BytecodeCache"] = missing, + enable_async: bool = False, + ) -> "Environment": + """Create a new overlay environment that shares all the data with the + current environment except for cache and the overridden attributes. + Extensions cannot be removed for an overlayed environment. 
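As a small sketch of the two helpers above: add_extension registers an extension on an existing environment, and extend attaches extra attributes only if they do not exist yet (the my_site_config name is made up for illustration):

from jinja2 import Environment

env = Environment()
env.add_extension("jinja2.ext.do")          # enable the built-in {% do %} extension after creation
env.extend(my_site_config={"lang": "it"})   # hypothetical attribute; set only because it does not exist yet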
An overlayed + environment automatically gets all the extensions of the environment it + is linked to plus optional extra extensions. + + Creating overlays should happen after the initial environment was set + up completely. Not all attributes are truly linked, some are just + copied over so modifications on the original environment may not shine + through. + + .. versionchanged:: 3.1.2 + Added the ``newline_sequence``,, ``keep_trailing_newline``, + and ``enable_async`` parameters to match ``__init__``. + """ + args = dict(locals()) + del args["self"], args["cache_size"], args["extensions"], args["enable_async"] + + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.overlayed = True + rv.linked_to = self + + for key, value in args.items(): + if value is not missing: + setattr(rv, key, value) + + if cache_size is not missing: + rv.cache = create_cache(cache_size) + else: + rv.cache = copy_cache(self.cache) + + rv.extensions = {} + for key, value in self.extensions.items(): + rv.extensions[key] = value.bind(rv) + if extensions is not missing: + rv.extensions.update(load_extensions(rv, extensions)) + + if enable_async is not missing: + rv.is_async = enable_async + + return _environment_config_check(rv) + + @property + def lexer(self) -> Lexer: + """The lexer for this environment.""" + return get_lexer(self) + + def iter_extensions(self) -> t.Iterator["Extension"]: + """Iterates over the extensions by priority.""" + return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) + + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: + """Get an item or attribute of an object but prefer the item.""" + try: + return obj[argument] + except (AttributeError, TypeError, LookupError): + if isinstance(argument, str): + try: + attr = str(argument) + except Exception: + pass + else: + try: + return getattr(obj, attr) + except AttributeError: + pass + return self.undefined(obj=obj, name=argument) + + def getattr(self, obj: t.Any, attribute: str) -> t.Any: + """Get an item or attribute of an object but prefer the attribute. + Unlike :meth:`getitem` the attribute *must* be a string. + """ + try: + return getattr(obj, attribute) + except AttributeError: + pass + try: + return obj[attribute] + except (TypeError, LookupError, AttributeError): + return self.undefined(obj=obj, name=attribute) + + def _filter_test_common( + self, + name: t.Union[str, Undefined], + value: t.Any, + args: t.Optional[t.Sequence[t.Any]], + kwargs: t.Optional[t.Mapping[str, t.Any]], + context: t.Optional[Context], + eval_ctx: t.Optional[EvalContext], + is_filter: bool, + ) -> t.Any: + if is_filter: + env_map = self.filters + type_name = "filter" + else: + env_map = self.tests + type_name = "test" + + func = env_map.get(name) # type: ignore + + if func is None: + msg = f"No {type_name} named {name!r}." + + if isinstance(name, Undefined): + try: + name._fail_with_undefined_error() + except Exception as e: + msg = f"{msg} ({e}; did you forget to quote the callable name?)" + + raise TemplateRuntimeError(msg) + + args = [value, *(args if args is not None else ())] + kwargs = kwargs if kwargs is not None else {} + pass_arg = _PassArg.from_obj(func) + + if pass_arg is _PassArg.context: + if context is None: + raise TemplateRuntimeError( + f"Attempted to invoke a context {type_name} without context." 
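A brief sketch of overlay and the two lookup helpers above, using a plain dict as sample data:

from jinja2 import Environment

base = Environment(trim_blocks=False)
draft = base.overlay(trim_blocks=True)   # shares everything with `base` except the overridden option
assert draft.overlayed and draft.linked_to is base

data = {"items": [1, 2, 3]}
print(base.getitem(data, "items"))       # tries data["items"] first, falls back to attribute access
print(base.getattr(data, "keys"))        # tries the attribute first, falls back to data["keys"]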
+ ) + + args.insert(0, context) + elif pass_arg is _PassArg.eval_context: + if eval_ctx is None: + if context is not None: + eval_ctx = context.eval_ctx + else: + eval_ctx = EvalContext(self) + + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, self) + + return func(*args, **kwargs) + + def call_filter( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a filter on a value the same way the compiler does. + + This might return a coroutine if the filter is running from an + environment in async mode and the filter supports async + execution. It's your responsibility to await this if needed. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, True + ) + + def call_test( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a test on a value the same way the compiler does. + + This might return a coroutine if the test is running from an + environment in async mode and the test supports async execution. + It's your responsibility to await this if needed. + + .. versionchanged:: 3.0 + Tests support ``@pass_context``, etc. decorators. Added + the ``context`` and ``eval_ctx`` parameters. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, False + ) + + @internalcode + def parse( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> nodes.Template: + """Parse the sourcecode and return the abstract syntax tree. This + tree of nodes is used by the compiler to convert the template into + executable source- or bytecode. This is useful for debugging or to + extract information from templates. + + If you are :ref:`developing Jinja extensions ` + this gives you a good overview of the node tree generated. + """ + try: + return self._parse(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def _parse( + self, source: str, name: t.Optional[str], filename: t.Optional[str] + ) -> nodes.Template: + """Internal parsing function used by `parse` and `compile`.""" + return Parser(self, source, name, filename).parse() + + def lex( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: + """Lex the given sourcecode and return a generator that yields + tokens as tuples in the form ``(lineno, token_type, value)``. + This can be useful for :ref:`extension development ` + and debugging templates. + + This does not perform preprocessing. If you want the preprocessing + of the extensions to be applied you have to filter source through + the :meth:`preprocess` method. + """ + source = str(source) + try: + return self.lexer.tokeniter(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def preprocess( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> str: + """Preprocesses the source with all extensions. 
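A minimal sketch of the invocation and introspection helpers above, using ad-hoc template strings:

from jinja2 import Environment

env = Environment()
print(env.call_filter("upper", "jinja"))   # invokes the built-in `upper` filter -> "JINJA"
print(env.call_test("even", 4))            # invokes the built-in `even` test -> True

ast = env.parse("Hello {{ name }}")        # abstract syntax tree used by the compiler
for lineno, token_type, value in env.lex("{{ 1 + 1 }}"):
    print(lineno, token_type, value)       # raw token stream; extension preprocessing is not applied here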
This is automatically + called for all parsing and compiling methods but *not* for :meth:`lex` + because there you usually only want the actual source tokenized. + """ + return reduce( + lambda s, e: e.preprocess(s, name, filename), + self.iter_extensions(), + str(source), + ) + + def _tokenize( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: + """Called by the parser to do the preprocessing and filtering + for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. + """ + source = self.preprocess(source, name, filename) + stream = self.lexer.tokenize(source, name, filename, state) + + for ext in self.iter_extensions(): + stream = ext.filter_stream(stream) # type: ignore + + if not isinstance(stream, TokenStream): + stream = TokenStream(stream, name, filename) # type: ignore + + return stream + + def _generate( + self, + source: nodes.Template, + name: t.Optional[str], + filename: t.Optional[str], + defer_init: bool = False, + ) -> str: + """Internal hook that can be overridden to hook a different generate + method in. + + .. versionadded:: 2.5 + """ + return generate( # type: ignore + source, + self, + name, + filename, + defer_init=defer_init, + optimized=self.optimized, + ) + + def _compile(self, source: str, filename: str) -> CodeType: + """Internal hook that can be overridden to hook a different compile + method in. + + .. versionadded:: 2.5 + """ + return compile(source, filename, "exec") + + @typing.overload + def compile( # type: ignore + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[False]" = False, + defer_init: bool = False, + ) -> CodeType: + ... + + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[True]" = ..., + defer_init: bool = False, + ) -> str: + ... + + @internalcode + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: bool = False, + defer_init: bool = False, + ) -> t.Union[str, CodeType]: + """Compile a node or template source code. The `name` parameter is + the load name of the template after it was joined using + :meth:`join_path` if necessary, not the filename on the file system. + the `filename` parameter is the estimated filename of the template on + the file system. If the template came from a database or memory this + can be omitted. + + The return value of this method is a python code object. If the `raw` + parameter is `True` the return value will be a string with python + code equivalent to the bytecode returned otherwise. This method is + mainly used internally. + + `defer_init` is use internally to aid the module code generator. This + causes the generated code to be able to import without the global + environment variable to be set. + + .. versionadded:: 2.4 + `defer_init` parameter added. + """ + source_hint = None + try: + if isinstance(source, str): + source_hint = source + source = self._parse(source, name, filename) + source = self._generate(source, name, filename, defer_init=defer_init) + if raw: + return source + if filename is None: + filename = "
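A small usage sketch of compile, with an illustrative template string; passing raw=True returns the generated Python source instead of a code object:

from jinja2 import Environment

env = Environment()
code_obj = env.compile("Hello {{ name }}")           # Python code object, as used internally
source = env.compile("Hello {{ name }}", raw=True)   # equivalent generated Python source as text
print(source.splitlines()[0])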