author    | Karl Berry <karl@freefriends.org>               | 2020-10-20 20:39:10 +0000
committer | Karl Berry <karl@freefriends.org>               | 2020-10-20 20:39:10 +0000
commit    | 91c386913cf9993cb4271471921934a5c2e019a0 (patch)
tree      | 392fd984d6c207e452f86f4644e330fac61a4140 /Master
parent    | 735245a8b0ba602e5e319d146fbbb7ffe1c39ca5 (diff)
jupynotex (20oct20)
git-svn-id: svn://tug.org/texlive/trunk@56715 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master')
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/LICENSE | 201
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/README.md | 73
-rwxr-xr-x | Master/texmf-dist/doc/latex/jupynotex/example/build | 5
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/example/example.tex | 27
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/example/notebook.ipynb | 593
-rwxr-xr-x | Master/texmf-dist/doc/latex/jupynotex/tests/run | 3
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/tests/test_cellparser.py | 79
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/tests/test_main.py | 111
-rw-r--r-- | Master/texmf-dist/doc/latex/jupynotex/tests/test_notebook.py | 237
-rw-r--r-- | Master/texmf-dist/tex/latex/jupynotex/jupynotex.py | 166
-rw-r--r-- | Master/texmf-dist/tex/latex/jupynotex/jupynotex.sty | 7
-rwxr-xr-x | Master/tlpkg/bin/tlpkg-ctan-check | 2
-rwxr-xr-x | Master/tlpkg/libexec/ctan2tds | 1
-rw-r--r-- | Master/tlpkg/tlpsrc/collection-mathscience.tlpsrc | 1
-rw-r--r-- | Master/tlpkg/tlpsrc/jupynotex.tlpsrc | 0
15 files changed, 1505 insertions, 1 deletion
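For context on the files added below: the new README describes the intended usage of the package, namely copying `jupynotex.py` and `jupynotex.sty` into the LaTeX project and pulling notebook cells in with `\jupynotex` (Python 3 and the `tcolorbox` package are required, and the document must be compiled with shell escape). A minimal usage sketch based on that README, with illustrative file names:

    % compile with shell escape, e.g.: xelatex -shell-escape paper.tex
    \documentclass{article}
    \usepackage{jupynotex}
    \begin{document}
    % include the whole notebook
    \jupynotex{notebook.ipynb}
    % include only cells 1 and 3, plus the range 6-8
    \jupynotex[1,3,6-8]{notebook.ipynb}
    \end{document}

The bundled `example/build` script does exactly this for the shipped `example.tex` and `notebook.ipynb`.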
diff --git a/Master/texmf-dist/doc/latex/jupynotex/LICENSE b/Master/texmf-dist/doc/latex/jupynotex/LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/Master/texmf-dist/doc/latex/jupynotex/README.md b/Master/texmf-dist/doc/latex/jupynotex/README.md
new file mode 100644
index 00000000000..d19b75542c4
--- /dev/null
+++ b/Master/texmf-dist/doc/latex/jupynotex/README.md
@@ -0,0 +1,73 @@
+# What is Jupynotex?
+
+A Jupyter Notebook to LaTeX translator to include whole or partial notebooks in your papers.
+
+## Wait, what?
+
+A TeX package that you can use in your project to include Jupyter Notebooks (all of them, or some specific cells) as part of your text.
+
+It will convert the Jupyter Notebook format to proper LaTeX so it gets included seamlessly, supporting text, LaTeX, images, etc.
+
+
+# How To Use?
+
+All you need to do is include the `jupynotex.py` and `jupynotex.sty` files in your LaTeX project, and use the package from any of your `.tex` files:
+
+    \usepackage{jupynotex}
+
+After that, you can include a whole Jupyter Notebook in your file by just specifying its file name:
+
+    \jupynotex{file_name_for_your_notebook.ipynb}
+
+If you do not want to include it completely, you can optionally specify which cells:
+
+    \jupynotex[<which cells>]{sample.ipynb}
+
+The cells specification can be numbers separated by commas, or ranges using dashes (defaulting to the first or last cell if either side is omitted).
+
+Examples:
+
+- include the whole *foobar* notebook:
+
+  `\jupynotex{foobar.ipynb}`
+
+- include just cell #7:
+
+  `\jupynotex[7]{sample.ipynb}`
+
+- include cells 1 and 3, plus the range 6 through 8:
+
+  `\jupynotex[1,3,6-8]{sample.ipynb}`
+
+- include everything up to the fourth cell, and the eighth:
+
+  `\jupynotex[-4,8]{whatever.ipynb}`
+
+- include cell number 3, and from 12 to the notebook's end:
+
+  `\jupynotex[3,12-]{somenote.ipynb}`
+
+
+## Full Example
+
+Check the `example` directory in this project.
+
+There you will find an example `notebook.ipynb`, an `example.tex` file that includes cells from that notebook in different ways, and a `build` script.
+
+Play with it. Enjoy.
+
+
+# Dependencies
+
+You need Python 3 on your system, and the [tcolorbox](https://ctan.org/pkg/tcolorbox) package in your LaTeX toolbox.
+
+
+# Feedback & Development
+
+Please open any issue or ask any question [here](https://github.com/facundobatista/jupynotex/issues/new).
+
+To run the tests (you need [fades](https://github.com/pyar/fades) installed):
+
+    ./tests/run
+
+This material is subject to the Apache 2.0 license.
diff --git a/Master/texmf-dist/doc/latex/jupynotex/example/build b/Master/texmf-dist/doc/latex/jupynotex/example/build
new file mode 100755
index 00000000000..51598c0450f
--- /dev/null
+++ b/Master/texmf-dist/doc/latex/jupynotex/example/build
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+ln -s ../jupynotex.py .
+ln -s ../jupynotex.sty .
+xelatex -shell-escape example.tex diff --git a/Master/texmf-dist/doc/latex/jupynotex/example/example.tex b/Master/texmf-dist/doc/latex/jupynotex/example/example.tex new file mode 100644 index 00000000000..5251586b63d --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/example/example.tex @@ -0,0 +1,27 @@ +\documentclass{article} + +\usepackage{jupynotex} + +\begin{document} + +One cell: + +\jupynotex[1]{notebook.ipynb} + + +A range of cells: + +\jupynotex[4-6]{notebook.ipynb} + + +Some specific cells: + +\jupynotex[12,17]{notebook.ipynb} + + +The whole notebook: + +\jupynotex{notebook.ipynb} + + +\end{document} diff --git a/Master/texmf-dist/doc/latex/jupynotex/example/notebook.ipynb b/Master/texmf-dist/doc/latex/jupynotex/example/notebook.ipynb new file mode 100644 index 00000000000..a423e9a11fa --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/example/notebook.ipynb @@ -0,0 +1,593 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Non alphanums {'>', '\\t', '[', '\\r', '\\x13', '\\x1d', '<', '^', '\\x04', '\\x08', '\\x17', '\\x1b', '\\\\', '\\x19', '`', '\\x1f', '$', '\\x0b', '\\x14', '\\x12', '\\x18', ')', '=', '?', ' ', \"'\", '\\x1c', '|', ';', '/', '\\x1e', '\\n', '\\x07', '\\x10', '\\x03', '\\x02', '#', '\\x0c', '@', '\\x16', '_', '}', '.', '-', '(', '!', '+', '\\x06', ']', '{', ':', '\\x01', '\\x11', '\\x0f', '\\x05', ',', '~', '\\x15', '\\x00', '\"', '%', '\\x0e', '*', '&', '\\x1a'}\n", + "Separators b'([\\\\>\\\\\\t\\\\[\\\\\\r\\\\\\x13\\\\\\x1d\\\\<\\\\^\\\\\\x04\\\\\\x08\\\\\\x17\\\\\\x1b\\\\\\\\\\\\\\x19\\\\`\\\\\\x1f\\\\$\\\\\\x0b\\\\\\x14\\\\\\x12\\\\\\x18\\\\)\\\\=\\\\?\\\\ \\\\\\'\\\\\\x1c\\\\|\\\\;\\\\/\\\\\\x1e\\\\\\n\\\\\\x07\\\\\\x10\\\\\\x03\\\\\\x02\\\\#\\\\\\x0c\\\\@\\\\\\x16\\\\_\\\\}\\\\.\\\\-\\\\(\\\\!\\\\+\\\\\\x06\\\\]\\\\{\\\\:\\\\\\x01\\\\\\x11\\\\\\x0f\\\\\\x05\\\\,\\\\~\\\\\\x15\\\\\\x00\\\\\"\\\\%\\\\\\x0e\\\\*\\\\&\\\\\\x1a])'\n" + ] + } + ], + "source": [ + "import string\n", + "\n", + "non_alphanums = set(chr(x) for x in range(127)) - set(string.ascii_letters) - set(string.digits)\n", + "print(\"Non alphanums\", non_alphanums)\n", + "separators = '([' + ''.join('\\\\' + x for x in non_alphanums) + '])'\n", + "separators = separators.encode('ascii')\n", + "print(\"Separators\", separators)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[b'dlg',\n", + " b'=',\n", + " b'Resource',\n", + " b'.',\n", + " b'loadfromresfile',\n", + " b'(',\n", + " b'filename',\n", + " b',',\n", + " b'win',\n", + " b',',\n", + " b'QuoteDialog',\n", + " b'.',\n", + " b'MyQuoteDialog',\n", + " b',',\n", + " b\"'\",\n", + " b'QuoteDialog',\n", + " b\"'\",\n", + " b',',\n", + " b'win',\n", + " b')']" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import re\n", + "program_line = b\"\"\"dlg = Resource.loadfromresfile(filename, win, QuoteDialog.MyQuoteDialog, 'QuoteDialog', win)\"\"\"\n", + "tokens = [t for x in re.split(separators, program_line) if (t := x.strip())]\n", + "tokens" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Checking base dir: HTML document\n", + "Checking base dir: C source\n", + "Checking base dir: C++ source\n", + "Checking base dir: PHP script\n", + 
"Checking base dir: ReStructuredText file\n", + "Checking base dir: Python script\n", + "Checking base dir: Ruby script\n", + "Checking base dir: Java source\n", + "Checking base dir: Objective-C source\n", + "Checking base dir: Perl5 module source\n", + "Checking base dir: XML 1.0 document\n" + ] + } + ], + "source": [ + "import os\n", + "from collections import Counter\n", + "\n", + "DUMP_BASE = '/home/facundo/devel/ml/dump'\n", + "\n", + "# directories with 1000 files of each code type, excluding \"just text\" (ascii, utf8, etc)\n", + "CODE_TYPES = [\n", + " 'HTML document',\n", + " 'C source',\n", + " 'C++ source',\n", + " 'PHP script',\n", + " 'ReStructuredText file',\n", + " 'Python script',\n", + " 'Ruby script',\n", + " 'Java source',\n", + " 'Objective-C source',\n", + " 'Perl5 module source',\n", + " 'XML 1.0 document',\n", + "]\n", + "\n", + "# let's collect ALL tokens present in all the program files\n", + "tokens = Counter()\n", + "for basedir in CODE_TYPES:\n", + " print(\"Checking base dir:\", basedir)\n", + " for dirpath, dirnames, filenames in os.walk(os.path.join(DUMP_BASE, basedir)):\n", + " for fname in filenames:\n", + " fpath = os.path.join(dirpath, fname)\n", + " with open(fpath, 'rb') as fh:\n", + " tokens.update(t for x in re.split(separators, fh.read()) if (t := x.strip()))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total different tokens 213400\n", + " 870525 b'.'\n", + " 756849 b'_'\n", + " 730609 b'('\n", + " 717433 b'='\n", + " 699725 b')'\n", + " 688556 b'/'\n", + " 661461 b'\"'\n", + " 640989 b','\n", + " 625121 b'-'\n", + " 594091 b'>'\n" + ] + } + ], + "source": [ + "different_tokens = len(tokens)\n", + "print(\"Total different tokens\", different_tokens)\n", + "for name, quant in tokens.most_common(10):\n", + " print(\"{:8d} {}\".format(quant, name))" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total tokens found: 20101050\n", + "Total representative: 3324\n", + "Last ten...\n", + " 289 b'enables'\n", + " 289 b'smaller'\n", + " 289 b'Creates'\n", + " 289 b'cross'\n", + " 289 b'GLFW'\n", + " 289 b'Os'\n", + " 289 b'usb'\n", + " 288 b'stylesheets'\n", + " 288 b'ad'\n", + " 288 b'WIDTH'\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "\n", + "total_tokens = sum(tokens.values())\n", + "print(\"Total tokens found:\", total_tokens)\n", + "\n", + "most = total_tokens * 0.9\n", + "tot = 0\n", + "representative_data = []\n", + "for name, quant in tokens.most_common():\n", + " representative_data.append((name, quant))\n", + " tot += quant\n", + " if tot > most:\n", + " break\n", + "\n", + "print(\"Total representative:\", len(representative_data))\n", + "print(\"Last ten...\")\n", + "for name, quant in representative_data[-10:]:\n", + " print(\"{:8d} {}\".format(quant, name))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting data from base dir HTML document\n", + "Collecting data from base dir C source\n", + "Collecting data from base dir C++ source\n", + "Collecting data from base dir PHP script\n", + "Collecting data from base dir ReStructuredText file\n", + "Collecting data from base dir Python script\n", + "Collecting data from base dir Ruby script\n", + "Collecting data from base dir Java source\n", + 
"Collecting data from base dir Objective-C source\n", + "Collecting data from base dir Perl5 module source\n", + "Collecting data from base dir XML 1.0 document\n", + "Src data samples: 11000\n" + ] + } + ], + "source": [ + "import random\n", + "\n", + "# real \"ML data\": a list of (code_type, features) (one pair for each file)\n", + "# code_type: the *position* of the code type corresponding to the file (needs to be an int)\n", + "# features: a list of values, each value corresponds to how many of the tokens of that position the file has\n", + "\n", + "representative_tokens = [name for name, _ in representative_data]\n", + "\n", + "all_src_data = []\n", + "for idx, basedir in enumerate(CODE_TYPES):\n", + " print(\"Collecting data from base dir\", basedir)\n", + " for dirpath, dirnames, filenames in os.walk(os.path.join(DUMP_BASE, basedir)):\n", + " for fname in filenames:\n", + " fpath = os.path.join(dirpath, fname)\n", + " with open(fpath, 'rb') as fh:\n", + " fcontent = fh.read()\n", + " \n", + " file_tokens = Counter(t for x in re.split(separators, fcontent) if (t := x.strip()))\n", + " token_quantities = [file_tokens.get(t, 0) for t in representative_tokens]\n", + "\n", + " all_src_data.append((idx, token_quantities))\n", + "\n", + "print(\"Src data samples:\", len(all_src_data))\n", + "\n", + "# shuffle, as currently is too much \"per directory\"\n", + "random.shuffle(all_src_data)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "import tensorflow as tf\n", + "from tensorflow.keras import Model, layers\n", + "import numpy as np\n", + "\n", + "# representation of our model\n", + "num_classes = len(CODE_TYPES)\n", + "num_features = len(representative_tokens)\n", + "\n", + "# 1st and 2nd layer number of neurons (these numbers are just chamuyo)\n", + "n_hidden_1 = 128 \n", + "n_hidden_2 = 256\n", + "\n", + "# training parameters (more chamuyo)\n", + "learning_rate = 0.1\n", + "\n", + "batch_size = 256\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Example code types: (2, 0, 4, 4, 5)\n", + "Example token quants: [246, 786, 246, 61, 246] [32, 32, 5, 22, 5]\n", + "Token quants shape: (11000, 3324)\n", + "Example normalized quants: [0.3129771 1. 
0.3129771 0.07760815 0.3129771 ] [0.9411765 0.9411765 0.14705883 0.64705884 0.14705883]\n" + ] + } + ], + "source": [ + "# separate the source data and into two pairing lists\n", + "code_types, token_quantities = zip(*all_src_data)\n", + "print(\"Example code types:\", code_types[:5])\n", + "print(\"Example token quants:\", token_quantities[0][:5], token_quantities[117][:5])\n", + "\n", + "# convert features to float\n", + "float_quantities = np.array(token_quantities, np.float32)\n", + "print(\"Token quants shape:\", float_quantities.shape)\n", + "\n", + "# normalize EACH ONE to [0, 1]\n", + "for quants in float_quantities:\n", + " quants /= max(quants)\n", + "print(\"Example normalized quants:\", float_quantities[0][:5], float_quantities[117][:5])" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Real training set: 89.70%\n" + ] + } + ], + "source": [ + "# let's prepare the teching sets, input and output for training first (90% of cases) and\n", + "# then testing what's learned (the remaining 10%); \n", + "input_training = []\n", + "output_training = []\n", + "input_testing = []\n", + "output_testing = []\n", + "for token_distribution, code_type in zip(float_quantities, code_types):\n", + " if random.random() < .1:\n", + " input_testing.append(token_distribution)\n", + " output_testing.append(code_type)\n", + " else:\n", + " input_training.append(token_distribution)\n", + " output_training.append(code_type)\n", + "print(\"Real training set: {:.2f}%\".format(100 * len(input_training) / len(float_quantities))) " + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "import tensorflow as tf\n", + "from tensorflow.keras import Model\n", + "\n", + "class NeuralNet(Model):\n", + " \"\"\"Chamuyo al cuadrado.\"\"\"\n", + " \n", + " def __init__(self):\n", + " super(NeuralNet, self).__init__()\n", + " self.fc1 = layers.Dense(n_hidden_1, activation=tf.nn.sigmoid) # se puede cambiar a relu\n", + " self.fc2 = layers.Dense(n_hidden_2, activation=tf.nn.sigmoid) # se puede cambiar a relu\n", + " self.out = layers.Dense(num_classes)\n", + "\n", + " def call(self, x, is_training=False):\n", + " x = self.fc1(x)\n", + " x = self.fc2(x)\n", + " x = self.out(x)\n", + " if not is_training:\n", + " # tf cross entropy expect logits without softmax, so only\n", + " # apply softmax when not training.\n", + " x = tf.nn.softmax(x)\n", + " return x\n", + "\n", + "neural_net = NeuralNet()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "# Note that this will apply 'softmax' to the logits.\n", + "def cross_entropy_loss(x, y):\n", + " # Convert labels to int 64 for tf cross-entropy function.\n", + " y = tf.cast(y, tf.int64)\n", + " # Apply softmax to logits and compute cross-entropy.\n", + " loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=x)\n", + " # Average loss across the batch.\n", + " return tf.reduce_mean(loss)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "# Stochastic gradient descent optimizer.\n", + "optimizer = tf.optimizers.SGD(learning_rate)\n", + "\n", + "# Optimización. 
\n", + "def run_optimization(x, y):\n", + " # Funciones para calcular el gradiente\n", + " with tf.GradientTape() as g:\n", + " # Algoritmo de forward\n", + " pred = neural_net(x, is_training=True)\n", + " # Computa la función de costo o pérdida utilizando entropía cruzada\n", + " loss = cross_entropy_loss(pred, y)\n", + " \n", + " # Actualiza las variables de entrenamiento.\n", + " trainable_variables = neural_net.trainable_variables\n", + "\n", + " # Computa los gradientes\n", + " gradients = g.gradient(loss, trainable_variables)\n", + " \n", + " # Actualiza los nuevos parámetros W (pesos) y b (bias).\n", + " optimizer.apply_gradients(zip(gradients, trainable_variables))" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "# Accuracy metric.\n", + "def accuracy(y_pred, y_true):\n", + " # Predicted class is the index of highest score in prediction vector (i.e. argmax).\n", + " correct_prediction = tf.equal(tf.argmax(y_pred, 1), tf.cast(y_true, tf.int64))\n", + " return tf.reduce_mean(tf.cast(correct_prediction, tf.float32), axis=-1)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 100: loss=2.379924, accuracy=0.125000\n", + "Step 200: loss=2.372284, accuracy=0.128906\n", + "Step 300: loss=2.366430, accuracy=0.140625\n", + "Step 400: loss=2.360768, accuracy=0.121094\n", + "Step 500: loss=2.347394, accuracy=0.109375\n", + "Step 600: loss=2.349795, accuracy=0.199219\n", + "Step 700: loss=2.312950, accuracy=0.218750\n", + "Step 800: loss=2.284508, accuracy=0.289062\n", + "Step 900: loss=2.173395, accuracy=0.335938\n", + "Step 1000: loss=2.115966, accuracy=0.300781\n", + "Step 1100: loss=1.977837, accuracy=0.441406\n", + "Step 1200: loss=1.860783, accuracy=0.425781\n", + "Step 1300: loss=1.866206, accuracy=0.425781\n", + "Step 1400: loss=1.773057, accuracy=0.402344\n", + "Step 1500: loss=1.736271, accuracy=0.546875\n", + "Step 1600: loss=1.626320, accuracy=0.578125\n", + "Step 1700: loss=1.537970, accuracy=0.539062\n", + "Step 1800: loss=1.369012, accuracy=0.609375\n", + "Step 1900: loss=1.286771, accuracy=0.625000\n", + "Step 2000: loss=1.270916, accuracy=0.640625\n" + ] + } + ], + "source": [ + "train_data = tf.data.Dataset.from_tensor_slices((input_training, output_training))\n", + "\n", + "# NOTE: this doesn't only selectes, it completely transform the structures\n", + "# from <TensorSliceDataset shapes: ((3324,), ()), types: (tf.float32, tf.int32)>\n", + "# to <PrefetchDataset shapes: ((None, 3324), (None,)), types: (tf.float32, tf.int32)>\n", + "train_data = train_data.repeat().shuffle(5000).batch(batch_size).prefetch(1)\n", + "\n", + "display_step = 100\n", + "training_steps = 2000\n", + "\n", + "# Run training for the given number of steps.\n", + "for step, (input_batch, output_batch) in enumerate(train_data.take(training_steps), 1):\n", + " # Run the optimization to update W and b values.\n", + " run_optimization(input_batch, output_batch)\n", + " \n", + " if step % display_step == 0:\n", + " pred = neural_net(input_batch, is_training=True)\n", + " loss = cross_entropy_loss(pred, output_batch)\n", + " acc = accuracy(pred, output_batch)\n", + " print(\"Step {}: loss={:f}, accuracy={:f}\".format(step, loss, acc))" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Test Accuracy: 0.578994\n" + ] + } + ], + 
"source": [ + "# Test model on validation set\n", + "input_testing = np.array(input_testing)\n", + "pred = neural_net(input_testing, is_training=False)\n", + "print(\"Test Accuracy: {:f}\".format(accuracy(pred, output_testing)))" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "# If we consider the \"more probable\" prediction on each item, how well it goes?\n", + "all_guesses = neural_net(input_testing, is_training=False)\n", + "guesses_ok = [0] * len(CODE_TYPES)\n", + "guesses_bad = [0] * len(CODE_TYPES)\n", + "\n", + "for guess, real in zip(all_guesses, output_testing):\n", + " # guess is an array of len(CODE_TYPES) with a float in each position showing\n", + " # which one is the most probable to be real, so we need to get position \n", + " # for the max one and check if it matches with the real real :)\n", + " position_for_max = np.where(guess == np.amax(guess))[0][0]\n", + " if position_for_max == real:\n", + " guesses_ok[real] += 1\n", + " else:\n", + " guesses_bad[real] += 1" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAFmCAYAAACC84ZkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/d3fzzAAAACXBIWXMAAAsTAAALEwEAmpwYAABBB0lEQVR4nO3debyc4/3/8dc7mwSJWPJNEZHUEiUbkqC2iLWqltb6pYKiliLftrZuWt+26LdVxY9GLaGUoLbqYkkT+5KECEFsDUIQscUSJD6/P65r5txnMnPOnGSue87yeT4e8zhz3/fM/bnuOTNzzbXLzHDOOecAOtU7Ac4551oPzxScc84VeabgnHOuyDMF55xzRZ4pOOecK/JMwTnnXJFnCq4RSQdLurPe6XCBpG0lzc4p1hxJOyU6dw9JD0jarYnHjJY0N7M9S9LoFOlxlXmm0M5JmiLpXUkrVPN4M7vGzHZZjngTJP1yWZ/f0UkySesXts3sPjMbVM801ch44Hdm9q9qn2Bmm5jZlHRJcuV4ptCOSRoAbAsYsGcO8TqnjtFeSOpS7zTkycwONbOb6p0O1zzPFNq3Q4GHgQnA2OwBSetIuknSfEkLJF0Y9x8m6f7M4zaSdJekdyTNlrR/5tgESRdL+oekj4DvAAcDp0j6UNLf4uO+Ekss78UqgT0z59hd0tOSFkp6TdIPy12IpE6SfiLpZUlvSbpK0ioVHjta0lxJP4iPnSfp8MzxVeLz58fz/URSVZ+FeB1nSXpU0geSbpW0Wub4nvEa34uP/Urm2BxJp0qaCXxUmjFIujfefSK+fgeUqVKZI+n0+Jq9K+kKSd0zx4+S9EL8f90maa0mruXb8foXSPpxybFOkk6T9GI8fn32Osucay9JM+Jr8mKhmkjSWjEd78R0HZV5To/4HnpX0tPAyJJzFquz4mOvjI99RtIpJa9LxfeYayEz81s7vQEvAMcBmwOfA33j/s7AE8DvgZWA7sA28dhhwP3x/krAq8DhQBdgU+BtYON4fALwPrA14QdG97jvl5k0dI3p+BHQDRgDLAQGxePzgG3j/VWBzSpcyxHxPF8GVgZuAv5c4bGjgcXAmTH+7sDHwKrx+FXArUBPYADwHPCdKl/TKcBrwOD4+vwVuDoe2xD4CNg5xj0lprlbPD4HmAGsA/SocH4D1i+5lrmZ7TnAU/EcqwEPFF7v+Nq+DWwGrABcANxbIc7GwIfAdvGx58bXbKd4/CTCD4p+8fh44NoK5xoV3wc7x/fB2sBG8di9wEXxvTEcmA+MicfOBu6L17FOvK7Sa90p89h74nukHzCz8FiaeY/5rYXfG/VOgN8S/WNhG0JGsEbcfhb4n3h/q/jh7FLmeYfRkCkcANxXcnw8cEa8PwG4quT4BBpnCtsCbwCdMvuuBX4e778CfBfo1cz1TAKOy2wPitdX7hpGA59kjwFvAVsSMsTPiBlbPPZdYEqVr+sU4OzM9sbxfJ2BnwLXZ451ImQgo+P2HOCIZs5fTaZwTGZ7d+DFeP8y4DeZYyvH12hAmTg/A67LbK8Ur6PwJfwMsGPm+JpNvN7jgd+X2b8OsATomdl3FjAh3n8J2C1z7Ogy17pT5rG7Zo4dSUOm0OR7zG8tu3n1Ufs1FrjTzN6O23+hoQppHeBlM1vczDnWBbaIRfL3JL1HqB76UuYxrzZzjrWAV83si8y+lwm/JgG+Rfhie1nSPZK2auI8L5ecowvQt8LjF5Rc38eEL8k1CL8sS8+1NtXLXvPL8XxrlKYxXvOrJedu7vValviFKqLS+B8CCyh/bWtlz2NmH8XHFqwL3Jz5vz9D+IIv93qvA7xYIcY7ZrawJL1rZ46XXkslpY99tfRYE+8x1wIdqrGro5DUA9gf6Czpjbh7BaC3pGGED1R/SV2ayRheBe4xs52beEzpNLul268D60jqlPnQ9idU2WBmU4G9JHUFvgdcT/iSKfU64YuqoD+huuPNJtJWztuEX7zrAk9nzvVaC86RTV//eL63YxqHFA5IUnxs9ty1mJa4NP7r8X6j10jSSsDqlL+2eUC2vWPF+NiCVwmlmgeqSM+rwHpl9r8OrCapZyZjyL7W8+K1zMocq2Qeodqo8D/LvgZNvsdcy3hJoX3am/CrbmNCPe5wwhfAfYTG50cJH7KzJa0kqbukrcuc53Zgw9gg2TXeRmYbT8t4k1DvX/AI4Vf6KfH5o4FvANdJ6qYwLmIVM/sc+AD4ovSE0bXA/0gaKGll4NfAxCpKO42Y2RJCxvMrST0lrQt8H7gaQo8thW6hA5o4zSGSNo5fpGcCN2bO+3VJO8ZM7gfAp8CDLUhi6etXzvGS+sWG3x8DE+P+a4HDJQ1X6IL8a+ARM5tT5hw
3AntI2kZSt3gd2e+DPxJeo3UBJPWRtFeF9FwW4+4YG6jXlrSRmb1KuPaz4ntsKKEzwtXxedcDp0taVVI/4IQmrjn72LUJPyAKKr7Hmjifq6Te9Vd+q/0N+BehT3jp/v0Jda9dCL+kbiFUGbwNnB8fcxixTSFuDwL+TmiDWAD8Gxgej00g034Q921AaEx9D7gl7tuE0Ej4PuGX3j5xf7eY1ncJGcJUYoN3mbR3ItSDvxrTcjWx4bjMY0eTqZuO++bQUD+9anz+/Hi+nxHrown103OArhXOPYVQL/5oTPPfiO028fg+8Rrfj9e8Sbk0NPG/O4aQYb8X/1+NriWe4/QY4z3gSmDFkue/CLxDyNT7NRFrLKFNZwEhc8m+Rp0ImeVsQqPti8CvmzjXPoTG34WERt9d4/5+MR3vxHNk20NWJDT6vxev5+Qy11pIz0rAn+NjnwF+QmxLaeo95reW3xRfUOccIOknwHwzG1/h+BRCb6NLc01YQ/w5wJFmdnc94rcWko4FDjSz7eudlvbG2xScyzAzH43dCklak1Ct9hChNPoD4MK6Jqqd8kzBOdcWdCN0fR1IqEK6jjD+wdWYVx8555wr8t5Hzjnnitp89dEaa6xhAwYMqHcynHOuTZk+ffrbZtandH+bzxQGDBjAtGnT6p0M55xrUySVHUHu1UfOOeeKPFNwzjlX5JmCc865ojbfpuA6ls8//5y5c+eyaNGieiel3ejevTv9+vWja9eu9U6KawU8U3Btyty5c+nZsycDBgwgTELqloeZsWDBAubOncvAgQPrnRzXCnj1kWtTFi1axOqrr+4ZQo1IYvXVV/eSlytKmilIulxhjdynMvv+T9KzkmZKullS78yx0+M6rrMl7Zoyba7t8gyhtvz1dFmpSwoTgN1K9t0FDDazoYRFME4HkLQxcCBhCtzdgIskdU6cPueccxlJ2xTM7N7SxUrM7M7M5sPAvvH+XoQ1Yz8F/iPpBcKC4A+lTKNr2wac9veanm/O2V+v6nFvvPEG48aNY+rUqfTu3Zu+ffty3nnnseGGG9Y0PeVMmDCBXXbZhbXWWqv5BzvXQvVuaD6ChlWj1iZkEgVzqbDGqqSjCYt8079/Uyv4OVd7ZsY+++zD2LFjue66sLjXE088wZtvvtlsprB48WK6dOlScbsaEyZMYPDgwe0jU/j5KgnP/X66c7djdWtolvRjwhq717T0uWZ2iZmNMLMRffosNXWHc0lNnjyZrl27cswxxxT3DRs2jG222YaTTz6ZwYMHM2TIECZODL93pkyZwrbbbsuee+7JxhtvvNT2kiVLOPnkkxk5ciRDhw5l/PiG9X3OOecchgwZwrBhwzjttNO48cYbmTZtGgcffDDDhw/nk08+YdKkSWy66aYMGTKEI444gk8//TT318S1H3UpKUg6DNgD2NEa5u5+jcaLcfejZYupO5eLp556is0333yp/TfddBMzZszgiSee4O2332bkyJFst912ADz22GM89dRTDBw4kClTpjTavuSSS1hllVWYOnUqn376KVtvvTW77LILzz77LLfeeiuPPPIIK664Iu+88w6rrbYaF154Ib/97W8ZMWIEixYt4rDDDmPSpElsuOGGHHrooVx88cWMGzcu51fFtRe5lxQk7QacAuxpZh9nDt0GHChpBUkDCasrPZp3+pxbVvfffz8HHXQQnTt3pm/fvmy//fZMnToVgFGjRjUaB5DdvvPOO7nqqqsYPnw4W2yxBQsWLOD555/n7rvv5vDDD2fFFVcEYLXVVlsq5uzZsxk4cGCx2mrs2LHce++9qS/VtWNJSwqSriUsPL6GpLnAGYTeRisAd8WucA+b2TFmNkvS9YRFtxcDx5vZkpTpc25ZbLLJJtx4440tes5KK61UcdvMuOCCC9h118a9sO+4445lT6RzyyhpScHMDjKzNc2sq5n1M7PLzGx9M1vHzIbH2zGZx//KzNYzs0Fm9s+UaXNuWY0ZM4ZPP/2USy65pLhv5syZ9O7dm4kTJ7JkyRLmz5/Pvffey6hRo5o936677srFF1/M559/DsBzzz3HRx99xM4778wVV1zBxx+HAvU777wDQM+ePVm4cCEAgwYNYs6cObzwwgsA/PnPf2b77X0te7fs6t37yLnlUm0X0lqSxM0338y4ceM455xz6N69OwMGDOC8887jww8/ZNiwYUjiN7/5DV/60pd49tlnmzzfkUceyZw5c9hss80wM/r06cMtt9zCbrvtxowZMxgxYgTdunVj991359e//jWHHXYYxxxzDD169OChhx7iiiuuYL/99mPx4sWMHDmyUQO4cy3V5tdoHjFihPkiOx3HM888w1e+8pV6J6Pdqdvr6l1S60bSdDMbUbq/Y5cUUr0h/c3onGujfEI855xzRZ4pOOecK/JMwTnnXJFnCs4554o8U3DOOVfUsXsfubav1j3Iqug5tvLKK/Phhx/WNm4NnXfeeRx99NHF6TGcawkvKTjXiixevLjJ7Wqcd955xVHQzrWUZwrOLaMpU6YwevRo9t13XzbaaCMOPvhgCoNBp06dyle/+lWGDRvGqFGjWLhwIYsWLeLwww9nyJAhbLrppkyePBkI6yPsueeejBkzhh133HGp7Y8++ogjjjiCUaNGsemmm3LrrbcCsGTJEn74wx8yePBghg4dygUXXMD555/P66+/zg477MAOO+wAwLXXXsuQIUMYPHgwp556an1eLNdmePWRc8vh8ccfZ9asWay11lpsvfXWPPDAA4waNYoDDjiAiRMnMnLkSD744AN69OjBH/7wByTx5JNP8uyzz7LLLrvw3HPPAWFq7ZkzZ7LaaqsxYcKERts/+tGPGDNmDJdffjnvvfceo0aNYqedduKqq65izpw5zJgxgy5duhSn1j733HOZPHkya6yxBq+//jqnnnoq06dPZ9VVV2WXXXbhlltuYe+9967vC+daLS8pOLccRo0aRb9+/ejUqRPDhw9nzpw5zJ49mzXXXJORI0cC0KtXL7p06cL999/PIYccAsBGG23EuuuuW8wUdt5550ZTY2e377zzTs4++2yGDx/O6NGjWbRoEa+88gp333033/3ud4srt5WbWnvq1KmMHj2aPn360KVLFw4++GCfWts1yUsKzi2HFVZYoXi/c+fOy9QGAM1Prf3Xv/6VQYMGLVsinWsBLyk4V2ODBg1i3rx5xQV2Fi5cyOLFi9l222255pqw+uxzzz3HK6+8UtUX/a677soFF1xQbK94/PHHgVCaGD9+fDEjKje19qhRo7jnnnt4++23WbJkCddee61Pre2a5CUF17a1wskHu3XrxsSJEznhhBP45JNP6NGjB3fffTfHHXccxx57LEOGDKFLly5MmDChUUmjkp/+9KeMGzeOoUOH8sUXXzBw4EBuv/12jjzySJ577jmGDh1K165dOeqoo/je977H0UcfzW677cZaa63F5MmTOfvss9lhhx0wM77+9a+z11575fAquLaqY0+d7bOktjk+dXYaPnV2x1Np6myvPnLOOVfk1UeudvxXn3NtnpcUXJvT1qs8Wxt/PV
2WZwquTenevTsLFizwL7IaMTMWLFhA9+7d650U10p49ZFrU/r168fcuXOZP39+vZPSbnTv3p1+/frVOxntVxvr0OKZgmtTunbtysCBA+udDOfaLa8+cs45V+SZgnPOuaKk1UeSLgf2AN4ys8Fx32rARGAAMAfY38zelSTgD8DuwMfAYWb2WMr05c67bLrWzt+jHV7qksIEYLeSfacBk8xsA2BS3Ab4GrBBvB0NXJw4bc4550okzRTM7F7gnZLdewFXxvtXAntn9l9lwcNAb0lrpkyfc865xurR+6ivmc2L998A+sb7awOvZh43N+6bRwlJRxNKE/Tv3z9dSl3r18a6+znX2tW1odnCCKQWj0Iys0vMbISZjejTp0+ClDnnXMdUj5LCm5LWNLN5sXrorbj/NWCdzOP6xX3OdVze8Ft7XrpsUj1KCrcBY+P9scCtmf2HKtgSeD9TzeSccy4HqbukXguMBtaQNBc4AzgbuF7Sd4CXgf3jw/9B6I76AqFL6uEp0+acc25pSTMFMzuowqEdyzzWgONTpsc551zTfESzc865Is8UnHPOFXmm4Jxzrsinzm7PvDujc66FvKTgnHOuyDMF55xzRZ4pOOecK/JMwTnnXJE3NDvnXEIDFv0lyXnnJDmrlxScc85leEnBuZbwGTZdO+clBeecc0WeKTjnnCvy6iNXM6ka1CBdo5pzrjEvKTjnnCvykoJzrshLe85LCs4554o8U3DOOVfkmYJzzrkizxScc84VeabgnHOuyDMF55xzRZ4pOOecK6p6nIKkbsCGcXO2mX2eJknOOefqpapMQdJo4ErC+BMB60gaa2b3JkuZc8653FVbUvgdsIuZzQaQtCFwLbB5qoQ555zLX7VtCl0LGQKAmT0HdF2ewJL+R9IsSU9JulZSd0kDJT0i6QVJE2OVlXPOuZxUmylMk3SppNHx9idg2rIGlbQ2cCIwwswGA52BA4FzgN+b2frAu8B3ljWGc865lqs2UzgWeJrwRX5ivH/scsbuAvSQ1AVYEZgHjAFujMevBPZezhjOOedaoKo2BTP7FDg33pabmb0m6bfAK8AnwJ3AdOA9M1scHzYXWLvc8yUdDRwN0L9//1okyTnXQaSaCXZOkrPmr9reR08CVrL7fUIV0i/NbEFLgkpaFdgLGAi8B9wA7Fbt883sEuASgBEjRpSmyznn3DKqtvfRP4ElQCGLPZBQ5fMGMAH4Rgvj7gT8x8zmA0i6Cdga6C2pSywt9ANea+F5nXPOLYdqM4WdzGyzzPaTkh4zs80kHbIMcV8BtpS0IqH6aEdCqWMysC9wHTAWuHUZzu2cc24ZVdvQ3FnSqMKGpJGEHkMAi8s/pTIze4TQoPwY8GRMxyXAqcD3Jb0ArA5c1tJzO+ecW3bVlhSOBC6XtDJhRPMHwJGSVgLOWpbAZnYGcEbJ7peAUWUe7pxzLgfV9j6aCgyRtErcfj9z+PoUCXPOOZe/JjMFSYeY2dWSvl+yHwAzq0kXVeecc61DcyWFleLfnqkT4pxzrv6azBTMbHz8+4t8kuOcc66emqs+Or+p42Z2Ym2T41zr5qNhXXvXXJfU6fHWHdgMeD7ehgM+g6lzzrUzzVUfXQkg6Vhgm8K8RJL+CNyXPnnOOefyVO3gtVWBXpntleM+55xz7Ui1g9fOBh6XNJkweG074OepEuWcc64+qh28doWkfwJbEGZLPdXM3kiaMuecc7mrtqQAYfqJbeN9A/5W++Q455yrp4qZgqTtgIfM7HNJZwMjgWvi4RMlbWVmP8ojkc5V4l1EnautphqaFwF/jPd3B3Y2s8vN7HLCgjh7pE6cc865fFUsKZjZo5I+yuzqDbwT76+SMlHOOefqo7lxCrPi3bNYuvfRaYnT5pxzLmfV9j66VtIUQrsCeO8j55xrl6oavCZpH+BjM7vNzG4DFknaO2nKnHPO5a7aEc1nZBfWMbP3WHrVNOecc21cteMUymUeLRnj4JxzS0nVpRi8W/GyqrakME3SuZLWi7dzCbOnOueca0eqzRROAD4DJgLXEcYwHJ8qUc455+qj2t5HH+FdUJ1zrt2rtqTgnHOuA/BMwTnnXJFnCs4554qqHby2oaRJkp6K20Ml/WR5AkvqLelGSc9KekbSVpJWk3SXpOfjX1/dzTnnclRtSeFPwOnA5wBmNhM4cDlj/wH4l5ltBAwDniE0Zk8ysw2ASXjjtnPO5araTGFFM3u0ZN/iZQ0qaRXCpHqXAZjZZ3GU9F7AlfFhVwJ7L2sM55xzLVdtpvC2pPUIK64haV9g3nLEHQjMB66Q9LikSyWtBPQ1s8J53wD6lnuypKMlTZM0bf78+cuRDOecc1nVZgrHA+OBjSS9BowDjl2OuF2AzYCLzWxTYKlxEGZmxEyolJldYmYjzGxEnz59liMZzjnnsqodvPYSsFP8Nd/JzBYuZ9y5wFwzeyRu30jIFN6UtKaZzZO0JvDWcsZxzjnXAk1mCpK+X2E/AGZ27rIENbM3JL0qaZCZzQZ2BJ6Ot7HA2fHvrctyfuecc8umuZJCz/h3EGGBndvi9jeA0obnljoBuEZSN+Al4HBCddb1kr4DvAzsv5wxnHPOtUBzy3H+AkDSvcBmhWojST8H/r48gc1sBjCizKEdl+e8zjnnll21Dc19CbOkFnxGhZ5Bzjnn2q5qF8q5CnhU0s1xe29gQooEOeca+CI0Lm/V9j76laR/AtvGXYeb2ePpkuWcc64eql5S08weAx5LmBbnnHN15rOkOuecK/JMwTnnXFHV1UeS+hLGKgA8amY+2tg559qZatdT2J8wWG0/woCyR+KkeM4559qRaksKPwZGFkoHkvoAdxPmLHLOOddOVNum0KmkumhBC57rnHOujai2pPAvSXcA18btA4B/pEmSc865eql28NrJkr4JbBN3XWJmNzf1HOecc21P1b2PgAeBJcAXwNQ0yXHOOVdP1fY+OpLQ+2gfYF/gYUlHpEyYc865/FVbUjgZ2NTMFgBIWp1Qcrg8VcKcc87lr9oeRAuA7BKcC+M+55xz7Ui1JYUXCAPWbgUM2AuYWViuc1mX5XTOOde6VJspvBhvBYW1k3uWeaxzzrk2qtouqb9InRDnnHP112SmIOlCM/uepL8Rqo0aMbM9k6XMOedc7porKRwKfA/4bQ5pcc45V2fNZQovApjZPTmkxTnnXJ01lyn0KfQwKsd7HTnnXPvSXKbQGVgZUA5pcc45V2fNZQrzzOzMXFLinHOu7pob0ewlBOec60CayxR2TBlcUmdJj0u6PW4PlPSIpBckTZTULWV855xzjTWZKZjZO4njnwQ8k9k+B/i9ma0PvAt8J3F855xzGXVbUlNSP+DrwKVxW8AYGtZ9vhLYuy6Jc865Dqqe6yyfB5xCWLQHYHXgPTNbHLfnAmuXe6KkoyVNkzRt/vz5yRPqnHMdRV0yBUl7AG+Z2fRleb6ZXWJmI8xsRJ8+fWqcOuec67hashxnLW0N7Clpd6A70Av4A9BbUpdYWugHvFan9DnnXIdUl5KCmZ1uZv3MbABwIPBvMzsYmExY7hNgLA1Td
DvnnMtBPdsUyjkV+L6kFwhtDJfVOT3OOdeh1Kv6qMjMpgBT4v2XgFH1TI9zznVkra2k4Jxzro48U3DOOVfkmYJzzrkizxScc84VeabgnHOuyDMF55xzRZ4pOOecK/JMwTnnXJFnCs4554o8U3DOOVfkmYJzzrkizxScc84VeabgnHOuyDMF55xzRZ4pOOecK/JMwTnnXJFnCs4554o8U3DOOVfkmYJzzrkizxScc84Vdal3AjqSAYv+kuzcc5Kd2TnXkXhJwTnnXJFnCs4554o8U3DOOVfkmYJzzrmiumQKktaRNFnS05JmSTop7l9N0l2Sno9/V61H+pxzrqOqV0lhMfADM9sY2BI4XtLGwGnAJDPbAJgUt51zzuWkLpmCmc0zs8fi/YXAM8DawF7AlfFhVwJ71yN9zjnXUdW9TUHSAGBT4BGgr5nNi4feAPpWeM7RkqZJmjZ//vx8Euqccx1AXTMFSSsDfwXGmdkH2WNmZoCVe56ZXWJmI8xsRJ8+fXJIqXPOdQx1yxQkdSVkCNeY2U1x95uS1ozH1wTeqlf6nHOuI6pX7yMBlwHPmNm5mUO3AWPj/bHArXmnzTnnOrJ6zX20NfBt4ElJM+K+HwFnA9dL+g7wMrB/fZLnnHMdU10yBTO7H1CFwzvmmRbnnHMN6t77yDnnXOvRoafOTjWV9ZwkZ3XOufS8pOCcc67IMwXnnHNFnik455wr8kzBOedckWcKzjnnijxTcM45V+SZgnPOuSLPFJxzzhV5puCcc67IMwXnnHNFnik455wr8kzBOedckWcKzjnnijxTcM45V+SZgnPOuSLPFJxzzhV5puCcc67IMwXnnHNFnik455wr8kzBOedckWcKzjnnijxTcM45V+SZgnPOuSLPFJxzzhW1ukxB0m6SZkt6QdJp9U6Pc851JK0qU5DUGfh/wNeAjYGDJG1c31Q551zH0aoyBWAU8IKZvWRmnwHXAXvVOU3OOddhyMzqnYYiSfsCu5nZkXH728AWZva9kscdDRwdNwcBs3NI3hrA2znEqVe8esT0eB6vtcdsz/HWNbM+pTu75BS8pszsEuCSPGNKmmZmI9prvHrE9Hger7XHbO/xymlt1UevAetktvvFfc4553LQ2jKFqcAGkgZK6gYcCNxW5zQ551yH0aqqj8xssaTvAXcAnYHLzWxWnZNVkGt1VR3i1SOmx/N4rT1me4+3lFbV0Oycc66+Wlv1kXPOuTryTME551yRZwouN5K2rmZfW9Xer8/VnqSB1ezLk2cKFUj6czX7ahhPkg6R9LO43V/SqFTxMnHXlbRTvN9DUs+E4S6ocl/NtOfrk3RONftqGG9FST+V9Ke4vYGkPVLFy8TdRtLh8X6f1F+aOb9n/lpm340J4zWrVfU+amU2yW7EeZk2TxjvIuALYAxwJrCQ8IYZmSqgpKMII8NXA9YjjAv5I7BjjeNsBXwV6CPp+5lDvQi9zJJo79cH7AycWrLva2X21coVwHRgq7j9GnADcHuieEg6AxhBmLngCqArcDWQpASW43tmI8J3zCqSvpk51AvoXstYLeWZQglJpwM/AnpI+qCwG/iMtN3FtjCzzSQ9DmBm78axGikdT5hv6pEY83lJ/5UgTjdgZcL7Lfur6wNg3wTxCtrl9Uk6FjgO+LKkmZlDPYEHah0vYz0zO0DSQQBm9rEkJYwHsA+wKfBYjPl64l/ueb1nBgF7AL2Bb2T2LwSOShCvap4plDCzs4CzJJ1lZqfnGPrzWBoxCMVkQskhpU/N7LPC51pSl0L8WjKze4B7JE0ws5cl9Qq7bWGtY5Vor9f3F+CfwFlAdnr5hWb2TqKYAJ9J6kHDe3Q94NOE8QA+MzOTVIi5UuJ4eb1nbgVulbSVmT1U6/MvD88UKjCz0yWtDaxL5nUys3sThTwfuBn4L0m/IvzC/EmiWAX3SCqUinYm/Pr8W8J4fSTdTvw1Lel94Agzm54oXru8PjN7H3ifMLX8ZsA2hC+uB4CUmcIZwL+AdSRdQ6jCOSxhPIDrJY0HeseqnSOAPyWMl/d75oUYbwCNv2eOSBizST54rQJJZxOm2XgaWBJ3m5ntmTDmRoS6SwGTzOyZVLFivE7Ad4BdYsw7gEst0ZsiVnUcb2b3xe1tgIvMbGiieO39+n4K7A/cFHftDdxgZr9MES/GXB3YkvB6PmxmyWf0jF/Oxf+hmd2VMFbe75kHgfsIbTWF7xnMrFwDdC48U6hA0mxgqJmlLh4X4m0JzCpUOcQqiK+Y2SMJY64ELDKzJXG7M7CCmX2cKN7jZrZpyb7HzGyzFPHylvf1xffoMDNbFLd7ADPMbFCiePsA/44lFST1Bkab2S0p4sUYA4F5JdfY18zmJIqX92dihpkNT3HuZeVdUit7idDTIS8XAx9mtj+M+1KaBPTIbPcA7k4Y7x5J4yWNlrS9pIuAKZI2i9UgNSHpSUkzK91qFaeMXK4v43Ua91RZgbSzCp9RyBAAzOw9QpVSSjfQuG1tSdyXSt6fidsl7Z7w/C3mbQqVfQzMkDSJTGOamZ2YKJ6yRVQz+yI2cqXU3cyKGZGZfShpxYTxhsW/pV8kmxLqxMfUKE7yvvMV5HV9Be8DsyTdFc+/M/CopPMhyXu13I/I1O/RLnEVRgBiI3DKXnl5fyZOAn4k6TNCD0eFsNYrYcwmeaZQ2W3kO233S5JOpKF0cByhtJLSR5I2M7PHACRtDnySKpiZ7ZDq3CVxXs4jTpm4uVxfxs3xVjAlcbxpks4lrKMOoftmqk4CBfMl7WlmtwFI2ou0K5Pl/ZlI2b12mXibQhNi/WV/M0u+3GfsC30+4dekEYqx48zsrYQxRwATCdUQAr4EHFDr3jKSDjGzq9V4YFeRmZ1b43j3m9k2khbSuDthkl9heV9fvcT69p8COxFe17uAX5nZRwljrgdcA6wVd80Fvm1mLyaKl8tnIhNPwMHAQDP7X0nrAGua2aMp4lXDSwoVSPoG8FvCwKSBkoYDZ6bofRQbs35vZgfW+tzNxNwW2IgwkAZgtpl9niBcoW95Xr+KDoVcf4Xlen2Srjez/SU9SZk+9Cl6O8X3y+15loZizGPNbEtJK0OozkkcL6/PREF2JoP/JbQl/j8SzmTQHC8pVCBpOuEfNaXQo0TSU2Y2OFG8+4Ex2frT1CQ9ambJ51eKsToDJ5rZ73OINd3MNpc0ycxqOj1BEzHzvL41zWyepHXLHU9VfRbb176ZbWxOTdLDZrZljvFy+0zEeI9ZnMkg8z3zhJkNa+65qXhJobLPzex9NR7Fn3KE8UvAA5JuA4rF8cRVDw9IupBQXM7GfKzWgcxsicL0CMm/NIFOcUDQhuWqdFK8pnleX8wQOgMTcm7H+BB4MjZsZ98vqTpfADwePxM3lMS8qfJTlktun4moHjMZNMkzhcpmSfpvoLOkDYATgQcTxnsx3jqRXzXL8Pj3zMy+FL1kCvL6wB1IGMhVOhdRanlnsl9IWiXHX+430TBQLi/dgQU0fk9awnQMj3/z+kzUYyaDJnn1UQWxG9qPaTyy8X8Lg2hcy0maXGa3mVmSD5yk
r5nZP1Ocu0K8vK/vVkJ31zx/ubsaU84zGTSbHs8UWof4hVKu0TDVLxQU124oE/PMcvtd6yJpbLn9ZnZlonj/ofx79Msp4sWYV1SImWRuoHp8JiStCqxD47mPUlVXNcurjyqIXdPKTVSVZB4b4IeZ+92BbwGLE8UqyHYl7E4Y9JXsV4qkXwO/iSNhCx+GH5hZXYvLtVKH67uRMlMyJIoFYV2Dgu7AfoR1B1LKrtXQnTCV9usJ4+X9mfhfwqSCL9KQ+aWsrmo+TV5SKE9hXpmTgSfJNPzkOTCqDj0hViBMODY60fnznhtoBSuZu6rcvhrGy/v6HgZ2KnTTjN027zSzr6aIVyEN080s5eJTpfE6AffndY05fCZmA0Py7HXYHC8pVDa/MIoyD5Kyv7g6EVZ5WyWv+NGKhJWmUumc/VKOgwNT/rJ9CCj9Qi63r1byvr5cp2Qomb+pE6HkkPd3yAZAikVvKkn9mXiKsNBOskGqLeWZQmVnSLqUMLI4O/dRql4P0wnFRhGqjf5DmMI3mZLBT52BPjTudVFr1wCTYj0xwOFAzeu/JX0JWJswJ/6mhNcUwlKHKeexyeX6MnKdkgH4Xeb+YmAOYeruZDKj0hX/vkG65Ubr8Zk4i9Dt9ikaf88km6K/OV59VIGkqwkjG2fRUH1kqRq46qFk8NNi4E0zS9qOIWk3wjQJAHeZ2R0JYowl1NOOAKbSkCl8AFyZMGPP5foysUYC15HTlAwdQd6fCUmzgPEsXU19T6qYzabJM4XyJM22RPPSV4jXFTgW2C7umgKMTzzEHknDCEP7Ae41s2RTS8e5cz6xMAPsIMJUAv9MdY2SvmU5LlaS9/XFmF3JaUoGSasQZoAtvEfvIUz9knSchKQ9MzGnmNntTT2+BvHy/ExMNbO6TWlRjq+nUNmDkjbOMd7FhHaEi+JtcxKvpyDpJEKVx3/F2zWSTkgY8l6gu8Iyp/8Cvg1MSBhv7/hFBoRfgXGqhlRyvT5J+xHaFZ4iDNabqDTrNhRcTlhYfv94+wC4oslnLCeFFRBPIqyA+DRwUuzllSpe3p+J+ySdJWkrxXU3Ev8Pm2dmfitzI3RD+wyYDcwkFO9mJoz3RDX7ahxzJrBSZnulxNf4WPx7AnBKvD8jYbzvAs8CuwNHAc8B32hH1zcz/t0GmAx8HXgkYbylriXl9RWuEeiU2e6c+D2a92dicpnbv1O+ps3dvKG5st1yjrdE0noWpwSW9GUya7YmopIYS2iof08ST9JWhKmCC43onVMFM7Pxsc52MmEO/k3N7I1U8cj5+mj4330d+JOZ/V1SsvWZgU8kbWNm9wNI2pq0DdsFvYF34v3UPfJy/UxY/mtwNMszhcrybmw5GZgs6SXCm3BdQu+VlK4AHpF0c4y5F3BZwngnAacDN5vZrJjxlZsaoiYkfZsw//+hwFDgH5ION7MnEoXM9fqA1ySNJ6y4dk7sU5+ySvhY4MpYJSfCF/VhCeNBQ++cyTHmdsBpCePl+plojbMKeENzBZmuaSKMbBxIaMjbJGHMFWjcaJhkkFVJzM0I1Q8A95nZ46lj5kXSLcDRFhcqkjQKuMRa2ULpyyqOSdgNeNLMnpe0JmEg1J2J4/YCMLMPUsbJxFuThvUFHk1c2sv1MyHpB5nN4ghqq2MvR88UqhTfKMeZ2ZGJzr8f8C8zWyjpJ4QBVr+0hHOgKKxqNdfMPpW0AzAEuMriNA3thaQVzezjeL+btaLRo21JbIS9gtDY/CfCe/S0lJlQrKKaYWYfSTokxvyDpVszoq6fidQjqKvhvY+qFL+ct0gY4qcxQ9iGMGPiZSTufQT8ldCWsT7wR8KkXH9JHDM3sUfH04TG5kJXw/Pqmqi27YhYOtgFWJ3Qu+rsxDEvBj6O/7vvE+YIuiphvHp/JlKPoG6WtylUoMaLs3Qi/EJJORFX3o2GAF+Y2WJJ3wQuNLMLJKUsKq9hZikXXS91HrArcBuAmT0habsmn+GaUmhw3Z3w63mWpJQdEwAWm5lJ2gv4f2Z2maSUI/3z/kzkPYK6WZ4pVJZdnGUx8HfCr4hU8m40hLDq00GEhthvxH1dax1EYb3ry4HFkpYA+5tZygWLiszs1ZLvrWQ9uhRWzTqKpWfWTTXN8wnA1Wb2borzlzFd0p2E9rXTJfUk/SphCyWdDhwCbKcwIV7N36MZuXwmMvbI3M9lVoHmeKZQgZn9IueQ+xMaDX9rZu/FxrWTE8c8HDgG+JWZ/UfSQODPCeL8CtjWzJ6VtAXwG2D7BHEAkLSlmT0MvCrpq4DFkb8nkXAaZOBW4D7gbtJ3JwboC0yV9Bgh073D0jYSfoewMtlLZvaxpNVJ30PuAOC/ge+Y2RuS+gP/lzBeXp+JgjWBWWa2EEBST0kbm9kjCWM2yRuaK1BYh3Y/azw3/nVmtmtdE9YGqWT66NLtVPEkrQH8gTAXkYA7gZPMbEGiuDPy7tkUq292IXyZjQCuBy4rjHdxrVusmtqskJnHktC0lJ+P5nhJobI+2R4HZvaupDyn7G1P/qukjabRtpmdmyJobL84OMW5K7hd0u5m9o+8Asb69jcIs4cuBlYFbpR0l5mdklc63DJTtnRnYd6sun4ve6ZQ2RJJ/c3sFSjOnujFqmXzJxq30ZRu19qXJVVcC8PSTUt8EvAjSZ8BhYnpzMx6pQgWu4geShitfSlwspl9Hn9tPg94ptD6vSTpRBp6Gh4HvFTH9Him0IQfA/dLuodQ9bAtcHStg8Sub33N7IGS/VsDb7SHaoA6tM/Mp/Hc/7kws5QZXTmrAd8s7bMff23uUeE5yy2WmLtn4r2SIEYfQmn96ZL9GxMWwJpf65glcYpjWxI7BjgfKCzZejcJvmdawtsUmhDrpLeMmw+n6E4p6XbgdDN7smT/EODXZvaN8s+saRpON7OzEp7//KaOm9mJNY73uJUsi5kX5T/Nc2H0rQEPJB7suCchs12LsFLYuoTRtzUf5S/pOuAiM7u3ZP+2wLFm9t+1jhnP/1VCqWtlM+sfx0d818yOSxGvNfLBa037KjA63rZs8pHLrm9phgAQ9w1IFLPUfonPPz1z27NkO8WCMP9JcM5mqfw0zykz258SVnZbHVgDuCKOhk/lfwmfg+fMbCBhkOXDiWKtX5ohAJjZfYR5rFL5PWFsy4IY7wkaMvmak9RP0s2S3oq3v0qq6+A1LylUED/gIwlzqwMcBEw1sx/VOM7zZrZBhWMvmNn6tYxXIU7S3kAlsZL/ipf0LZpo/7FEK69JmgkMN7Mv4nZn4HEzS/IlprDo+zAzWxS3exCmhEiyOJSkaWY2QtIThBlnv5D0hJkNSxCr4iJXTR2rQdxHzGyL7Ps01TXGc99FGDFd6PZ6CHCwme2cIl41vE2hst1p/AG/EngcqGmmAEyTdJSZ/Sm7U9KRpPkVXTj/f2iY8G9NNczOamb25VRxyaexvlCf/l+E0t6/4/YOwINAsuU4yXea59cJdfuL4vYKwGsJ470naWXCYkLXSHoL+ChRrBfK9eSS9DXSNsTmPbalj5llFyqaIGlcwnjN8kyhab1J/wEfB9ws6WA
aMoERQDdgn0QxicV/oL518CmY2eEAcfTtxmY2L26vSdqV3nKZ5lnSBYTM9X1gVvy1aYTR8I/WOl7GXoT1E/6H0NV3FdJNyTAO+Luk/Wn8udiKxqOAa+0YwtiWtQkZ7J3A8QnjLVCY6O/auH0QseqqXrz6qII41P1swnz4xQ+4mU1MFG8HYHDcnGVm/27q8TWOnTRTkLSQhlJJD6DQq6NQMknVZfMZM/tKZrsT4bX9ShNPW96Yyad5ljS2qeNmdmWtY8a43wcmmlnK0kg23gqE0czFzwXwl0J1WXsQu7pfQMjsjFCSPTFFj66q0+SZQmV5fMBbA0kXmtn36p2OWpN0IbABDb/CDgBeMLNka+4qrM+8Lo3nPlqqwbQtknQGYTqWd4CJwA1m9maO8fdI1ZsrU/oqq9Y95FozzxRKqJlFs1N2+WuvJHUnFMvXJ6yBe7nlNOmXpH1o6D1yr5ndnDDWOYSMZxYNE8VZqsFymXahRhK3CSFpKOE6v0VYe2CnlPEycZN1iMi79NWaMyFvU1haYdBTd0Id5hOEao6hwDRCMa9dkbSJmc1KGOJKwgjf+wgN+JsQGvDy8Biw0MzulrSipJ4WJx9LYG9gkOWwYl40InO/O6Fr8Wo5xH2LMK3GAkJjfl5SrpWcpMqtCdPi362BjQklLwj/w6fLPiMnXlKoQNJNwBmFMQSSBgM/N7N965uy2sthgronzWxIvN+FUBWXvAuspKMIo0NXM7P1JG0A/NHMdkwU75+ESRQ/THH+KtMw3cw2T3Tu4wjVR32AG4DrS0ccpyRplJmlbEgndhIoV/oakyjew8A2hZJz7PF0n5mlGhfVLC8pVDYoO6jMzJ6SlKyBss5SL5RSmAcICwuYJA5XdDwwCngkxn5eCSY1zFQFfAzMkDQJKJYWUlUFlFR1diKUHFJ+ptcBxpnZjIQxGlFYh/oHQH8zOypm7IMSjhT/YeZ+d0IVWcqqzlWBXjT0clw57qsbzxQqmynpUuDquH0woT68XYiNhoUeQX0l/axwzMxq3c1wmKTCIu8CesTtpL2PgE/N7LNCJhRLKSmKxoWqgOnEVd4yUhbFs/M7LQbmkHB0upmdDvnMfZRxBeF1LVTbvkYopSTJFMysdGzQA5JSlk7OZuluzD9PGK9ZnilUdjhwLA113/eSfs3kPM3J3P8cSLIQOoCZdU517mbcI+lHhExoZ8IMlH+rdZBCfbSkk8zsD9ljCjOZJmFmO5TE6gwcCDyXIp7CCnrnUjL3EaGNKJX1zOyA2EUcC4v7JCtqSsq2yXQCNifhIEQzuyJWOxbWfz+13r0cvU3B5TrNRZ7il8eRhEVoBNwBXGqJ3vTlXscUY0Ak9SJUja1NWO3t7rj9A2Cmme1Vy3iZuE8AY4C7zWzTOLbmEDNLtmaypAcJcyw9YGHhpPWAa81sVKJ42ZH+iwnzaJ1pZveniNcaeUnBQfo2hdzFX82zzGwjwvoNKWMdRBhkNVCN13HoSUNdcS39GXgXeIiwJvSPCf/DfRLX939uZgskdZLUycwmSzovYTwIVSn/AtaRdA2ht85hqYJlR/p3VJ4pOAi/xNoVM1siabYyCyUl9CAwjzBTabaefyFp2qG+nOnNdWmM3T+Hkb6FuY/uI/3cRwCY2Z2SphNmZxVhOdUUU9h/s5l0pJwvq1XxTKEFJP3WzH7Y/CNbN5Us7GNm78T97WZhn2hVwtxAj5L58qr1YDILi9y8LOlaQvXNu7U8fxnZ3lxLJM3NaeqHPQmT751EmM2zF5B0ASVJfyPMInqbmaXMgArrluQ2iWKs3hxFqAaE0Ij+aKrqzWp5m0ILSHrFzPrXOx3LS61gYZ88SNq+3H4zuydRvF8SGnofAy4H7kjxAZe0hIZMLjufVJLeXJm5qxrtjn8XAS8CPzazSbWMG2NvTxg9/XVgKnAdcHuqTDBOojjWSiZRNLNdaxxnF+AiwrKphbmk+hFG/R9nZnfWMl6L0uaZQvUkvWpm69Q7HctL0lQzG1nhWHGgmWu5+OtvF0LvtRHA9cBl7aj01UhsuxkMXGNmg5t7/HLGGUNoQ9ktVTfmvCZRlPQM8DUzm1OyfyDwj1rHawmvPipR0iWt0SHaT4Ns7yaO9cgrEamV/MLtBnQFPko4LgIzM0lvEKaBWEyowrpR0l1mdkqquPViZkuAJ+IAviQUFg/6BqHEsBlh2pRUJkm6g8aTKN6dIE4XYG6Z/a8R3qd145nC0qbT0CWt1Odl9rVFdVnYJ29m1rNwP/6C34t0y6oWxiQcCrxNWOf3ZDP7PP7afB5od5lCgZmNT3FeSdcT6t3/BVwI3GNx4asUzOx7JZMoXpJoEsXLgakKa1G/GvetQ6h+vCxBvKp59VEHJKkvcDPwGWUW9qn34JmUUowbyJz7F4QZYJcaCCjpK2aWcgWvdknSroRxEUtyjNmXkBEZoeH3rURxvkL4oZJtaL4tz/mkyvFMoYQ60NTZquPCPnko6WZYmBtoezOr6Uy3ajw1+JOENoRcpgZvrySNMbN/V+oqmqqLqMJKb/8HTCHUFmxLKPHdmCJea+SZQglJXwBPEaoAoHE1kqWaLdHVnqTs2reFuYH+VOtffpIm0jA1+NeAl80sr6nB2yVJvzCzM0r+hwVmZkckivsEsHPhPSKpD6GkMixFvApp+LmZ/TyveKW8TWFp3wf2JaxFex1ws9VxKmS3XC4tjMUoiGMxal0dsHFmMNllpF0nuUMwszPi3TPN7D/ZY7GHTiqdSn40LCCUMvNU13a9vC+21TOz88xsG+AEQsPPJEnXSxpe35S5ZVCuR0yKXjKNpgZPcP6O7K9l9qWsyvmXpDskHSbpMODvwD8TxluKmdV80saW8JJCBWb2kqRbCV00vw1sCMyoa6JcVSRtRRiV2kdhsfmCXkCKGVvrNTV4uyVpI8Lsq6uUtCv0IjNtd62Z2cmSvkWYYwnS9T6qSNLPrPbT11fNM4USkr5M6Ba2F6Gr2HWEUb6f1DVhriW6ERYr6UKYlK7gA0LVYE1Z/aYGb88GAXsQxtRkR9gvJAxgS8bM/irpLuL3o6TVClPB5ORIoG6Zgjc0l4gNzTMJUxJ/QMnwfjM7tx7pci0nad1y3UNd2yFpKzN7KMd43yXM57QI+IKG0t6Xaxzng0qHgB5mVrcf7N6msLRfEPrwf0H4tdmz5Obajksl9S5sSFo1jlZ1bccxZf6HlyeM90NgsJkNMLMvm9nAWmcI0XvABmbWq+TWkzDrbd149dHSFpjZhfVOhKuJNczsvcKGmb2rBGs0u6SGlvkfJhl8GL1ImFwwtasIK9e9WebYX3KIX5FnCks7gjCc3rV9X2TXU5C0LmnXTHa110nSqoXpyOPcZCm/t04HHpT0CPBpYaeZnVjLIGb2kyaOnVrLWC3lmYJrz34M3C/pHhpGpx5d3yS5Fvod8JCkG+L2fsCvEsYbT1hL4UlCFXKH4w3NJSQtpnzx0bsXtkGS1qBhEryHLcGqXS4tSRsTps0G+HfKuYFSzo3VVnimUMLfFO2HpO3K7Teze/NOi1t2krYhNMpeEaedWLl0lHMNY/
2aMB3K32hcfZRnl9S68kyhhGcK7UdcyrGgO2Hmy+k+f1XbIekMwkSGg8xsQ0lrATeY2dbNPHVZ45XLbGreJbWZNNR1hUdvU1jaDc0/xLUFVrKsqKR1gPPqkxq3jPYBNiUscYqZvS4pWddwM0s5r1K16rqYl2cKS1tT0vmVDta6F4LL1VygbsscumXyWVzNzgAkrVTvBOWgrtU3niksbVrm/i+AMyo90LVucYnIwgesEzCc+IvTtRnXSxoP9JZ0FKHL+J+aeU6rVzInV6NDhEGzdeNtCk3w9oW2TdLYzOZiYE7pVNqu9ZO0M7AL4QvzDjO7q85JWm6xraQiM/tFXmkp5ZlCEyQ9ZmZNrsTmWidJnYGrzOzgeqfFtT2SVibMjPxSdkR1R+DVR65dMrMlktaV1M3MPqt3elzLSLrfzLaRtJDydewLgP8zs4tqFO8iMzsu3t+GMNXEi8D6kr5rZv+oRZxMvIrtllDftksvKZQoeROuSMNANh+81sZIuorQsHwb8FFhv8902/ZJWh140MwG1eh8xVoBSZOBH5jZY3Eq/evNbEQt4mTifUZY9vd64HVKehyZ2ZW1jNcSXlIoEWcpdO3Di/HWiYYZbv1XUBsjaTNgG8L/7n4ze9zMFkganShkLzMrdIF9SVKK2aTXJEzZcQChvWsicGNrqKrykoJrtyTtZ2Y3NLfPtV6Sfkb48rwp7tqbMHjtlzWO8zHwAuEX+wCgf5yRtRMw08wG1zJeSex+hIW9vg+camZ/ThWrqvR4puDaq3IdBbzzQNsiaTYwzMwWxe0ewIxaVRtl4qxbsmuemX0W587azsxuKve8GsTdDDgI2BmYDvwu5dxO1fDqI9fuSPoasDuwdkmDXi9CUd21Ha8TpihZFLdXAF6rdZBKK/TFCRRrniFIOhP4OvAMYcnf082sVbw3vaTg2h1JwwgD1c4EfpY5tBCYXJib37VemYGH/YGRwF1xe2fgUTP7Zo3jPUn59qZCB5OhNY73BfAfGjqyFGInidcSnim4dktSVzP7XFJXYDDwmpm9Ve90ueaVDDxcSq1755SpPiqNV9O1vvOO1xKeKbh2R9IfgQvMbJakVYCHgCXAasAPzezauibQVU1Sd2D9uPlCoW0hUazOwN1mtkOqGJlYxRUByxzb1szuS52GSlJ0tXKu3rY1s1nx/uHAc2Y2BNgcOKV+yXLVktRF0m8IkxheSVjT+FVJv4klv5ozsyWEJVxXSXH+ElMknRIzIgAk9ZV0NfD7HOJX5JmCa4+yI5h3Bm4BMLM36pIatyz+j1CyG2hmm8ceY+sBvYHfJoz7IfCkpMsknV+4JYizOeF6ZkgaI+kk4FFCqXZUgnhV8+oj1+7EEam/I/RSmQxsZGZvSOoCPGVmG9U1ga5Zkp4HNrSSL6j4y/pZM9sgUdyybRmpRhjHzOD3hF5WW5rZ3BRxWsK7pLr26LvA+cCXgHGZEsKOwN/rlirXElaaIcSdSwprKyQKemUcC9HfzGaniiOpN3AOsAWwG6EL9T8lnWRm/04Vt6q0eUnBOdfaSLoFuMnMrirZfwiwv5ntmSjuNwjVU93MbKCk4cCZtY4n6SXgIuC8wviEGOsi4GUzO6iW8VqUNs8UXHslaUPgYqCvmQ2WNBTYs9ZTJLjak7Q2YdDYJ4SRvhDWau4B7GNmNR/AFuNOB8YAUwprqUh6qtbTXEjqV6mqSNJRZla3hYQ8U3DtlqR7gJOB8Sk/4C4dSWOATeLm02Y2KXG8h81sy+wCW5Jm1nMwWd68TcG1Zyua2aNSo1mJW8VUAq46sX49zzr2WZL+G+gsaQPgRODBHOPXnXdJde3Z25LWI04hIGlfYF59k+RauRMIJZNPCQvtvA+Mq2eC8uYlBdeeHQ9cAmwk6TXCXDO+PKdbShw5fQxh9PSTwFatZYK6vHmbgmv3JK1EKBV/DBxoZtfUOUmulZE0EfgcuA/4GjDHzMbVNVF14pmCa3ck9SKUEtYGbgXujts/ICyYslcdk+daIUlPxqlQiIMcH+2o62549ZFrj/4MvEuYMuAo4MeEKYn3MbMZdUyXa70+L9wxs8UlnRM6FC8puHan5FdfZ0Ljcv+UM2y6tk3SEuCjwiZhPMTHNKxv0KteacublxRce5T91bdE0lzPEFxTzKxz84/qGLyk4Nod/9Xn3LLzTME551yRD15zzjlX5JmCc865Is8UnHPOFXmm4JxzrsgzBeecc0X/H9sRjZFUhN64AAAAAElFTkSuQmCC\n", + "text/plain": [ + "<Figure size 432x288 with 1 Axes>" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "values_ok = [guesses_ok[i] for i in range(len(CODE_TYPES))]\n", + "values_bad = [guesses_bad[i] for i in range(len(CODE_TYPES))]\n", + "x_positions = np.arange(len(CODE_TYPES))\n", + "\n", + "p1 = plt.bar(x_positions, guesses_ok)\n", + "p2 = plt.bar(x_positions, guesses_bad, bottom=guesses_ok)\n", + "\n", + "plt.ylabel('Tipo de código')\n", + "plt.title('Aciertos o no, por tipo de código')\n", + "plt.xticks(x_positions, CODE_TYPES, rotation=90)\n", + "#plt.yticks(np.arange(0, 81, 10))\n", + "plt.legend((p1[0], p2[0]), ('Correcto', 'Incorrecto'))\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + 
"version": "3.8.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/Master/texmf-dist/doc/latex/jupynotex/tests/run b/Master/texmf-dist/doc/latex/jupynotex/tests/run new file mode 100755 index 00000000000..a5fa69f44d2 --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/tests/run @@ -0,0 +1,3 @@ +#!/bin/sh + +PYTHONPATH=. fades -d pytest -x pytest -sv "$@" diff --git a/Master/texmf-dist/doc/latex/jupynotex/tests/test_cellparser.py b/Master/texmf-dist/doc/latex/jupynotex/tests/test_cellparser.py new file mode 100644 index 00000000000..f9108f4bb8f --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/tests/test_cellparser.py @@ -0,0 +1,79 @@ +# Copyright 2020 Facundo Batista +# All Rights Reserved +# Licensed under Apache 2.0 + +import pytest +import re + +from jupynotex import _parse_cells + + +def test_empty(): + msg = "Empty cells spec not allowed" + with pytest.raises(ValueError, match=re.escape(msg)): + _parse_cells('', 100) + + +def test_simple(): + r = _parse_cells('1', 100) + assert r == [1] + + +def test_several_comma(): + r = _parse_cells('1,3,5,9,7', 100) + assert r == [1, 3, 5, 7, 9] + + +def test_several_range(): + r = _parse_cells('1-9', 100) + assert r == [1, 2, 3, 4, 5, 6, 7, 8, 9] + + +def test_several_limited(): + msg = "Notebook loaded of len 3, smaller than requested cells: [1, 2, 3, 4]" + with pytest.raises(ValueError, match=re.escape(msg)): + _parse_cells('1-4', 3) + + +def test_range_default_start(): + r = _parse_cells('-3', 8) + assert r == [1, 2, 3] + + +def test_range_default_end(): + r = _parse_cells('5-', 8) + assert r == [5, 6, 7, 8] + + +def test_not_int(): + msg = "Found forbidden characters in cells definition (allowed digits, '-' and ',')" + with pytest.raises(ValueError, match=re.escape(msg)): + _parse_cells('1,a', 3) + + +def test_not_positive(): + msg = "Cells need to be >=1" + with pytest.raises(ValueError, match=re.escape(msg)): + _parse_cells('3,0', 3) + + +def test_several_mixed(): + r = _parse_cells('1,3,5-7,2,9,11-13', 80) + assert r == [1, 2, 3, 5, 6, 7, 9, 11, 12, 13] + + +def test_overlapped(): + r = _parse_cells('3,5-7,6-9,8', 80) + assert r == [3, 5, 6, 7, 8, 9] + + +def test_bad_range_equal(): + msg = "Range 'from' need to be smaller than 'to' (got '12-12')" + with pytest.raises(ValueError, match=re.escape(msg)): + _parse_cells('12-12', 80) + + +def test_bad_range_smaller(): + msg = "Range 'from' need to be smaller than 'to' (got '3-2')" + with pytest.raises(ValueError, match=re.escape(msg)): + _parse_cells('3-2', 80) diff --git a/Master/texmf-dist/doc/latex/jupynotex/tests/test_main.py b/Master/texmf-dist/doc/latex/jupynotex/tests/test_main.py new file mode 100644 index 00000000000..fabfa4e01eb --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/tests/test_main.py @@ -0,0 +1,111 @@ +# Copyright 2020 Facundo Batista +# All Rights Reserved +# Licensed under Apache 2.0 + +import textwrap + +import jupynotex +from jupynotex import main + + +class FakeNotebook: + """Fake notebook. + + The instance supports calling (as it if were instantiated). The .get will return the + value in a dict for received key; raise it if exception. 
+ """ + + def __init__(self, side_effects): + self.side_effects = side_effects + + def __call__(self, path): + return self + + def __len__(self): + return len(self.side_effects) + + def get(self, key): + """Return or raise the stored side effect.""" + value = self.side_effects[key] + if isinstance(value, Exception): + raise value + else: + return value + + +def test_simple_ok(monkeypatch, capsys): + fake_notebook = FakeNotebook({ + 1: ("test cell content up", "test cell content down"), + }) + monkeypatch.setattr(jupynotex, 'Notebook', fake_notebook) + + main('boguspath', '1') + expected = textwrap.dedent("""\ + \\begin{tcolorbox}[title=Cell {01}] + test cell content up + \\tcblower + test cell content down + \\end{tcolorbox} + """) + assert expected == capsys.readouterr().out + + +def test_simple_only_first(monkeypatch, capsys): + fake_notebook = FakeNotebook({ + 1: ("test cell content up", ""), + }) + monkeypatch.setattr(jupynotex, 'Notebook', fake_notebook) + + main('boguspath', '1') + expected = textwrap.dedent("""\ + \\begin{tcolorbox}[title=Cell {01}] + test cell content up + \\end{tcolorbox} + """) + assert expected == capsys.readouterr().out + + +def test_simple_error(monkeypatch, capsys): + fake_notebook = FakeNotebook({ + 1: ValueError("test problem"), + }) + monkeypatch.setattr(jupynotex, 'Notebook', fake_notebook) + + main('boguspath', '1') + + # verify the beginning and the end, as the middle part is specific to the environment + # where the test runs + expected_ini = [ + r"\begin{tcolorbox}[colback=red!5!white,colframe=red!75!,title={ERROR when parsing cell 1}]", # NOQA + r"\begin{verbatim}", + r"Traceback (most recent call last):", + ] + expected_end = [ + r"ValueError: test problem", + r"\end{verbatim}", + r"\end{tcolorbox}", + ] + out = [line for line in capsys.readouterr().out.split('\n') if line] + assert expected_ini == out[:3] + assert expected_end == out[-3:] + + +def test_multiple(monkeypatch, capsys): + fake_notebook = FakeNotebook({ + 1: ("test cell content up", "test cell content down"), + 2: ("test cell content ONLY up", ""), + }) + monkeypatch.setattr(jupynotex, 'Notebook', fake_notebook) + + main('boguspath', '1-2') + expected = textwrap.dedent("""\ + \\begin{tcolorbox}[title=Cell {01}] + test cell content up + \\tcblower + test cell content down + \\end{tcolorbox} + \\begin{tcolorbox}[title=Cell {02}] + test cell content ONLY up + \\end{tcolorbox} + """) + assert expected == capsys.readouterr().out diff --git a/Master/texmf-dist/doc/latex/jupynotex/tests/test_notebook.py b/Master/texmf-dist/doc/latex/jupynotex/tests/test_notebook.py new file mode 100644 index 00000000000..bf6511cbd30 --- /dev/null +++ b/Master/texmf-dist/doc/latex/jupynotex/tests/test_notebook.py @@ -0,0 +1,237 @@ +# Copyright 2020 Facundo Batista +# All Rights Reserved +# Licensed under Apache 2.0 + +import base64 +import json +import os +import pathlib +import re +import tempfile +import textwrap + +import pytest + +from jupynotex import Notebook + + +@pytest.fixture +def notebook(): + _, name = tempfile.mkstemp() + + def _f(cells): + with open(name, 'wt', encoding='utf8') as fh: + json.dump({'cells': cells}, fh) + + return Notebook(name) + + yield _f + os.unlink(name) + + +def test_empty(notebook): + nb = notebook([]) + assert len(nb) == 0 + + +def test_source_code(notebook): + rawcell = { + 'cell_type': 'code', + 'source': ['line1\n', ' line2\n'], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + src, _ = nb.get(1) + expected = textwrap.dedent("""\ + \\begin{verbatim} + line1 + line2 
+ \\end{verbatim} + """) + assert src == expected + + +def test_source_markdown(notebook): + rawcell = { + 'cell_type': 'markdown', + 'source': ['line1\n', ' line2\n'], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + src, _ = nb.get(1) + expected = textwrap.dedent("""\ + \\begin{verbatim} + line1 + line2 + \\end{verbatim} + """) + assert src == expected + + +def test_output_missing(notebook): + rawcell = { + 'cell_type': 'code', + 'source': [], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + assert out is None + + +def test_output_simple_executeresult_plain(notebook): + rawcell = { + 'cell_type': 'code', + 'source': [], + 'outputs': [ + { + 'output_type': 'execute_result', + 'data': { + 'text/plain': ['default always present', 'line2'], + }, + }, + ], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + expected = textwrap.dedent("""\ + \\begin{verbatim} + default always present + line2 + \\end{verbatim} + """) + assert out == expected + + +def test_output_simple_executeresult_latex(notebook): + rawcell = { + 'cell_type': 'code', + 'source': [], + 'outputs': [ + { + 'output_type': 'execute_result', + 'data': { + 'text/latex': ['some latex line', 'latex 2'], + 'text/plain': ['default always present'], + }, + }, + ], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + expected = textwrap.dedent("""\ + some latex line + latex 2 + """) + assert out == expected + + +def test_output_simple_executeresult_image(notebook): + raw_content = b"\x01\x02 asdlklda3wudghlaskgdlask" + rawcell = { + 'cell_type': 'code', + 'source': [], + 'outputs': [ + { + 'output_type': 'execute_result', + 'data': { + 'image/png': base64.b64encode(raw_content).decode('ascii'), + 'text/plain': ['default always present'], + }, + }, + ], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + m = re.match(r'\\includegraphics\{(.+)\}\n', out) + assert m + (fpath,) = m.groups() + assert pathlib.Path(fpath).read_bytes() == raw_content + + +def test_output_simple_stream(notebook): + rawcell = { + 'cell_type': 'code', + 'source': [], + 'outputs': [ + { + 'output_type': 'stream', + 'text': ['some text line', 'text 2'], + }, + ], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + expected = textwrap.dedent("""\ + \\begin{verbatim} + some text line + text 2 + \\end{verbatim} + """) + assert out == expected + + +def test_output_simple_display_data(notebook): + raw_content = b"\x01\x02 asdlklda3wudghlaskgdlask" + rawcell = { + 'cell_type': 'code', + 'source': [], + 'outputs': [ + { + 'output_type': 'display_data', + 'data': { + 'image/png': base64.b64encode(raw_content).decode('ascii'), + }, + }, + ], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + m = re.match(r'\\includegraphics\{(.+)\}\n', out) + assert m + (fpath,) = m.groups() + assert pathlib.Path(fpath).read_bytes() == raw_content + + +def test_output_multiple(notebook): + rawcell = { + 'cell_type': 'code', + 'source': [], + 'outputs': [ + { + 'output_type': 'execute_result', + 'data': { + 'text/latex': ['some latex line', 'latex 2'], + }, + }, { + 'output_type': 'stream', + 'text': ['some text line', 'text 2'], + }, + ], + } + nb = notebook([rawcell]) + assert len(nb) == 1 + + _, out = nb.get(1) + expected = textwrap.dedent("""\ + some latex line + latex 2 + \\begin{verbatim} + some text line + text 2 + \\end{verbatim} + """) + assert out == expected diff --git 
a/Master/texmf-dist/tex/latex/jupynotex/jupynotex.py b/Master/texmf-dist/tex/latex/jupynotex/jupynotex.py
new file mode 100644
index 00000000000..036be088ce5
--- /dev/null
+++ b/Master/texmf-dist/tex/latex/jupynotex/jupynotex.py
@@ -0,0 +1,166 @@
+# Copyright 2020 Facundo Batista
+# All Rights Reserved
+# Licensed under Apache 2.0
+
+"""USAGE: jupynotex.py notebook.ipynb cells
+
+    cells is a string specifying which cells to include: separate groups with
+    commas, give ranges with a dash (start/end default to first/last cell), e.g. "1,3-5".
+"""
+
+import base64
+import json
+import sys
+import tempfile
+import traceback
+
+
+def _verbatimize(lines):
+    """Wrap a series of lines in a verbatim environment."""
+    result = [r"\begin{verbatim}"]
+    for line in lines:
+        result.append(line.rstrip())
+    result.append(r"\end{verbatim}")
+    return result
+
+
+def _save_content(data):
+    """Save the received b64encoded data to a temp file."""
+    _, fname = tempfile.mkstemp(suffix='.png')
+    with open(fname, 'wb') as fh:
+        fh.write(base64.b64decode(data))
+    return fname
+
+
+class Notebook:
+    """The notebook converter to LaTeX."""
+
+    def __init__(self, path):
+        with open(path, 'rt', encoding='utf8') as fh:
+            nb_data = json.load(fh)
+
+        self._cells = nb_data['cells']
+
+    def __len__(self):
+        return len(self._cells)
+
+    def _proc_src(self, content):
+        """Process the source of a cell."""
+        source = content['source']
+        result = []
+        if content['cell_type'] == 'code':
+            result.extend(_verbatimize(source))
+        elif content['cell_type'] == 'markdown':
+            # XXX: maybe we could parse this?
+            result.extend(_verbatimize(source))
+        else:
+            raise ValueError(
+                "Cell type not supported when processing source: {!r}".format(
+                    content['cell_type']))
+
+        return '\n'.join(result) + '\n'
+
+    def _proc_out(self, content):
+        """Process the output of a cell."""
+        outputs = content.get('outputs')
+        if not outputs:
+            return
+
+        result = []
+        for item in outputs:
+            output_type = item['output_type']
+            if output_type == 'execute_result':
+                data = item['data']
+                if 'image/png' in data:
+                    fname = _save_content(data['image/png'])
+                    result.append(r"\includegraphics{{{}}}".format(fname))
+                elif 'text/latex' in data:
+                    result.extend(data["text/latex"])
+                else:
+                    result.extend(_verbatimize(data["text/plain"]))
+            elif output_type == 'stream':
+                result.extend(_verbatimize(x.rstrip() for x in item["text"]))
+            elif output_type == 'display_data':
+                data = item['data']
+                fname = _save_content(data['image/png'])
+                result.append(r"\includegraphics{{{}}}".format(fname))
+            else:
+                raise ValueError("Output type not supported in item {!r}".format(item))
+
+        return '\n'.join(result) + '\n'
+
+    def get(self, cell_idx):
+        """Return the content from a specific cell in the notebook.
+
+        The content is already split into source and output, and converted to LaTeX.
+ """ + content = self._cells[cell_idx - 1] + source = self._proc_src(content) + output = self._proc_out(content) + return source, output + + +def _parse_cells(spec, maxlen): + """Convert the cells spec to a range of ints.""" + if not spec: + raise ValueError("Empty cells spec not allowed") + if set(spec) - set('0123456789-,'): + raise ValueError( + "Found forbidden characters in cells definition (allowed digits, '-' and ',')") + + cells = set() + groups = spec.split(',') + for group in groups: + if '-' in group: + cfrom, cto = group.split('-') + cfrom = 1 if cfrom == '' else int(cfrom) + cto = maxlen if cto == '' else int(cto) + if cfrom >= cto: + raise ValueError( + "Range 'from' need to be smaller than 'to' (got {!r})".format(group)) + cells.update(range(cfrom, cto + 1)) + else: + cells.add(int(group)) + cells = sorted(cells) + + if any(x < 1 for x in cells): + raise ValueError("Cells need to be >=1") + if maxlen < cells[-1]: + raise ValueError( + "Notebook loaded of len {}, smaller than requested cells: {}".format(maxlen, cells)) + + return cells + + +def main(notebook_path, cells_spec): + """Main entry point.""" + nb = Notebook(notebook_path) + cells = _parse_cells(cells_spec, len(nb)) + + for cell in cells: + try: + src, out = nb.get(cell) + except Exception: + title = "ERROR when parsing cell {}".format(cell) + print( + r"\begin{{tcolorbox}}" + r"[colback=red!5!white,colframe=red!75!,title={{{}}}]".format(title)) + tb = traceback.format_exc() + print('\n'.join(_verbatimize(tb.split('\n')))) + print(r"\end{tcolorbox}") + continue + + print(r"\begin{{tcolorbox}}[title=Cell {{{:02d}}}]".format(cell)) + print(src) + if out: + print(r"\tcblower") + print(out) + print(r"\end{tcolorbox}") + + +if __name__ == "__main__": + if len(sys.argv) != 3: + print(__doc__) + exit() + + main(*sys.argv[1:3]) diff --git a/Master/texmf-dist/tex/latex/jupynotex/jupynotex.sty b/Master/texmf-dist/tex/latex/jupynotex/jupynotex.sty new file mode 100644 index 00000000000..205343d0974 --- /dev/null +++ b/Master/texmf-dist/tex/latex/jupynotex/jupynotex.sty @@ -0,0 +1,7 @@ +\ProvidesPackage{jupynotex}[0.1] + +\usepackage{tcolorbox} + +\newcommand{\jupynotex}[2][-]{ + \input|"python3 jupynotex.py #2 #1" +} diff --git a/Master/tlpkg/bin/tlpkg-ctan-check b/Master/tlpkg/bin/tlpkg-ctan-check index 29dca97cd49..fae90c83353 100755 --- a/Master/tlpkg/bin/tlpkg-ctan-check +++ b/Master/tlpkg/bin/tlpkg-ctan-check @@ -402,7 +402,7 @@ my @TLP_working = qw( jbact jfmutil jigsaw jknapltx jkmath jlabels jlreq jlreq-deluxe jmb jmlr jneurosci jnuexam josefin jpsj jsclasses - jslectureplanner jumplines junicode + jslectureplanner jumplines junicode jupynotex jura juraabbrev jurabib juramisc jurarsp js-misc jvlisting kalendarium kanaparser kantlipsum karnaugh karnaugh-map karnaughmap kastrup kblocks kdgdocs kerkis kerntest ketcindy diff --git a/Master/tlpkg/libexec/ctan2tds b/Master/tlpkg/libexec/ctan2tds index f46a9950790..6fd85e92f79 100755 --- a/Master/tlpkg/libexec/ctan2tds +++ b/Master/tlpkg/libexec/ctan2tds @@ -1934,6 +1934,7 @@ $standardtex 'jadetex', '\.ltx|\.def|\.tex|\.ini|\.sty|\.fd', 'js-misc', '(cassette|idverb|js-misc|schild|sperr|xfig)\.tex', 'jslectureplanner', '\.lps|' . $standardtex, + 'jupynotex', '\.py|' . $standardtex, 'kanaparser', 'kanaparser.(tex|lua)$', 'karnaugh', 'kvmacros.tex', 'kastrup', 'binhex.tex|' . 
$standardtex, diff --git a/Master/tlpkg/tlpsrc/collection-mathscience.tlpsrc b/Master/tlpkg/tlpsrc/collection-mathscience.tlpsrc index f82def42c41..e1d4a3bfd02 100644 --- a/Master/tlpkg/tlpsrc/collection-mathscience.tlpsrc +++ b/Master/tlpkg/tlpsrc/collection-mathscience.tlpsrc @@ -109,6 +109,7 @@ depend interval depend ionumbers depend isomath depend jkmath +depend jupynotex depend karnaugh depend karnaugh-map depend karnaughmap diff --git a/Master/tlpkg/tlpsrc/jupynotex.tlpsrc b/Master/tlpkg/tlpsrc/jupynotex.tlpsrc new file mode 100644 index 00000000000..e69de29bb2d --- /dev/null +++ b/Master/tlpkg/tlpsrc/jupynotex.tlpsrc |
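
For orientation, below is a minimal usage sketch of the \jupynotex macro defined in jupynotex.sty above; it is not part of the commit. The file name notebook.ipynb and the cell spec 1-3,5 are illustrative placeholders (the package's shipped example/example.tex is the authoritative example). Because the macro pipes the notebook through python3 via \input|"...", the document presumably needs to be compiled with shell escape enabled, e.g. pdflatex --shell-escape.

% Minimal sketch (assumed layout: notebook.ipynb sits next to this .tex file).
\documentclass{article}
\usepackage{jupynotex}

\begin{document}
% Optional argument: which cells to include; defaults to "-", i.e. all cells.
% Mandatory argument: the notebook file to read.
\jupynotex[1-3,5]{notebook.ipynb}
\end{document}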