Prechádzať zdrojové kódy

工具docker封装完成

ysyyhhh 5 mesiacov pred
commit
310149f7b3
100 zmenil súbory, kde vykonal 31995 pridanie a 0 odobranie
  1. 4 0
      .gitignore
  2. 4 0
      README.md
  3. 23 0
      docker-compose.yml
  4. 63 0
      server/Dockerfile
  5. 131 0
      server/LEMON-master/.gitignore
  6. 201 0
      server/LEMON-master/LICENSE
  7. 222 0
      server/LEMON-master/README.md
  8. 47 0
      server/LEMON-master/config/demo.conf
  9. 43 0
      server/LEMON-master/config/experiments.conf
  10. 5035 0
      server/LEMON-master/dataset/DIS.csv
  11. 5002 0
      server/LEMON-master/dataset/sinewave.csv
  12. 7 0
      server/LEMON-master/lemon_requirements.txt
  13. 0 0
      server/LEMON-master/local_history.patch
  14. BIN
      server/LEMON-master/res.npy
  15. 172 0
      server/LEMON-master/run/SVNH_DatasetUtil.py
  16. 0 0
      server/LEMON-master/run/__init__.py
  17. 550 0
      server/LEMON-master/run/api_config_pool.json
  18. BIN
      server/LEMON-master/run/data/SVHN_test_32x32.mat
  19. BIN
      server/LEMON-master/run/data/SVHN_train_32x32.mat
  20. 0 0
      server/LEMON-master/run/data/a.txt
  21. BIN
      server/LEMON-master/run/data/adv_image/bim_mnist_image.npy
  22. BIN
      server/LEMON-master/run/data/adv_image/bim_mnist_label.npy
  23. BIN
      server/LEMON-master/run/data/combined_data/fashion_combined_10000_image.npy
  24. BIN
      server/LEMON-master/run/data/combined_data/fashion_combined_10000_label.npy
  25. BIN
      server/LEMON-master/run/data/t10k-images-idx3-ubyte.gz
  26. BIN
      server/LEMON-master/run/data/t10k-labels-idx1-ubyte.gz
  27. 34 0
      server/LEMON-master/run/localization_executor.py
  28. 202 0
      server/LEMON-master/run/localize_lemon.py
  29. 438 0
      server/LEMON-master/run/model2json.py
  30. 143 0
      server/LEMON-master/run/model_to_txt.py
  31. 640 0
      server/LEMON-master/run/mutate_lemon.py
  32. 146 0
      server/LEMON-master/run/mutation_executor.py
  33. 123 0
      server/LEMON-master/run/patch_hidden_output_extractor.py
  34. 134 0
      server/LEMON-master/run/patch_prediction_extractor.py
  35. 280 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARem1.json
  36. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARem5.json
  37. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARep14.json
  38. 317 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARep2.json
  39. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARep8.json
  40. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF1.json
  41. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF10.json
  42. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF11.json
  43. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF2.json
  44. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF9.json
  45. 335 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LA1.json
  46. 298 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LC1.json
  47. 298 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LC4.json
  48. 298 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LC6.json
  49. 376 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-MLA3.json
  50. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NAI1.json
  51. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NAI2.json
  52. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NAI9.json
  53. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NEB3.json
  54. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NS4.json
  55. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1-WS1.json
  56. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep1.json
  57. 280 0
      server/LEMON-master/run/sub_model/svhn_origin0-ARep10.json
  58. 257 0
      server/LEMON-master/run/sub_model/svhn_origin0-GF4.json
  59. 298 0
      server/LEMON-master/run/sub_model/svhn_origin0-LA2.json
  60. 303 0
      server/LEMON-master/run/sub_model/svhn_origin0-LA3.json
  61. 294 0
      server/LEMON-master/run/sub_model/svhn_origin0-LA5.json
  62. 292 0
      server/LEMON-master/run/sub_model/svhn_origin0-LA7.json
  63. 261 0
      server/LEMON-master/run/sub_model/svhn_origin0-LC3.json
  64. 261 0
      server/LEMON-master/run/sub_model/svhn_origin0-LC8.json
  65. 297 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-ARem3.json
  66. 297 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-ARep11.json
  67. 297 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-ARep16.json
  68. 297 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-ARem8.json
  69. 260 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-ARep15.json
  70. 260 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-GF13.json
  71. 260 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-GF14.json
  72. 226 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-LR10.json
  73. 350 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-MLA9.json
  74. 260 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-WS8.json
  75. 260 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12.json
  76. 260 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF3.json
  77. 335 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LA4.json
  78. 301 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LA9.json
  79. 264 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LC5.json
  80. 264 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LC7.json
  81. 264 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LC9.json
  82. 263 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARem7.json
  83. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep12.json
  84. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep13.json
  85. 286 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARem2.json
  86. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep4.json
  87. 286 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep5.json
  88. 286 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep6.json
  89. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep7.json
  90. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-GF15.json
  91. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-GF7.json
  92. 298 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-LA6.json
  93. 365 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-MLA7.json
  94. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NAI11.json
  95. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NAI6.json
  96. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NEB1.json
  97. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NEB2.json
  98. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NS5.json
  99. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NS6.json
  100. 249 0
      server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-WS4.json

+ 4 - 0
.gitignore

@@ -0,0 +1,4 @@
+/server/env/*.tar.gz
+/server/LEMON-master/origin_model/*.h5
+/server/LEMON-master/origin_model/*.hdf5
+/server/LEMON-master/dataset/*.npz

+ 4 - 0
README.md

@@ -0,0 +1,4 @@
+
+需要根据.gitignore 下载对应被忽略的大文件
+- env
+- model

+ 23 - 0
docker-compose.yml

@@ -0,0 +1,23 @@
+version: '3.8'
+
+services:
+  backend:
+    build: ./server
+    restart: always
+    ports:
+      - 8080
+    volumes:
+      - ./server/output:/output
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: "nvidia"
+              count: "all"
+              capabilities: [ "gpu" ]
+    networks:
+      - ma-network
+    tty: true
+networks:
+  ma-network:
+    driver: bridge

+ 63 - 0
server/Dockerfile

@@ -0,0 +1,63 @@
+# 创建一个基础镜像 
+FROM nvidia/cuda:11.2.2-cudnn8-devel-ubuntu20.04
+
+RUN apt-key del 7fa2af80
+RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/3bf863cc.pub
+RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/7fa2af80.pub
+
+# 重置apt-get
+RUN rm -rf /etc/apt/sources.list
+
+RUN echo "deb http://mirrors.tuna.tsinghua.edu.cn/ubuntu/ bionic main restricted universe multiverse" >> /etc/apt/sources.list
+
+RUN apt-get update
+
+RUN apt-get install -y default-jre
+
+RUN apt-get install -y cmake
+
+# 安装conda
+# yhyu13 : install additional packages
+# 设置apt的源为tsinghua镜像源
+RUN sed -i 's/archive.ubuntu.com/mirrors.tuna.tsinghua.edu.cn/g' /etc/apt/sources.list
+
+RUN apt-get update && apt-get install -y curl wget
+
+# 安装conda
+RUN curl -O https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh \
+    && bash Miniconda3-latest-Linux-x86_64.sh -b -p /opt/conda \
+    && rm Miniconda3-latest-Linux-x86_64.sh
+
+RUN /opt/conda/bin/conda init bash
+
+RUN mkdir env
+
+# 解压env中的mxnet tensorflow lemon
+COPY env/mxnet.tar.gz env/mxnet.tar.gz
+RUN mkdir /opt/conda/envs/mxnet
+RUN tar -zxvf env/mxnet.tar.gz -C /opt/conda/envs/mxnet
+
+COPY env/tensorflow.tar.gz env/tensorflow.tar.gz
+RUN mkdir /opt/conda/envs/tensorflow
+RUN tar -zxvf env/tensorflow.tar.gz -C /opt/conda/envs/tensorflow
+
+COPY env/lemon.tar.gz env/lemon.tar.gz
+RUN mkdir /opt/conda/envs/lemon
+RUN tar -zxvf env/lemon.tar.gz -C /opt/conda/envs/lemon
+
+RUN rm -rf env
+
+
+# 复制项目文件
+WORKDIR /app
+
+COPY LEMON-master /app/LEMON-master
+
+RUN echo "conda activate lemon" >> ~/.bashrc
+
+# RUN conda install -n lemon cudatoolkit=10.1
+# 随便启动一个进程空转
+CMD ["/bin/bash"]
+# CMD ["tail", "-f", "/dev/null"]
+
+

+ 131 - 0
server/LEMON-master/.gitignore

@@ -0,0 +1,131 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+.idea/

+ 201 - 0
server/LEMON-master/LICENSE

@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2020 The Authors: Zan Wang, Ming Yan, Junjie Chen, Shuang Liu, Dongdi Zhang.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

+ 222 - 0
server/LEMON-master/README.md

@@ -0,0 +1,222 @@
+# Deep Learning Library Testing via Effective Model Generation
+
+This is the implementation repository of our ESEC/FSE 2020 paper:  **Deep Learning Library Testing via Effective Model Generation.** 
+
+## Description
+
+`LEMON` is a novel approach to testing DL libraries by generating effective DL models via guided mutation. The goal is to test DL libraries as sufficiently as possible by exploring unused library code or different usage ways of library code. We further propose a heuristic strategy in `LEMON` to guide the process of model generation so as to generate models that can amplify the inconsistent degrees for real bugs. In this way, it is clearer to distinguish real bugs and uncertain impacts in DL libraries. We conducted an empirical study to evaluate the effectiveness of `LEMON` based on `20` release versions of `TensorFlow`,`Theano,` `CNTK`, and `MXNet`. `LEMON` detected `24` new bugs in the latest release versions of these libraries. The results also demonstrate that the models generated by `LEMON` outperform existing models and the models generated without guidance in terms of the number of unique bugs/inconsistencies and the achieved inconsistent degrees. 
+
+## Datasets/Models/Libraries
+
+### Datasets/Models
+
+We used `12` popular DL `models` based on `6` `datasets` including both images and sequential data, as the initial seed models in `LEMON`, which have been widely used in many existing studies.
+
+| Model       | Dataset              | Related link<sup>1</sup>                                     |
+| ----------- | -------------------- | ------------------------------------------------------------ |
+| AlexNet     | CIFAR-10             | [alexnet-cifar-10-keras-jupyter](https://github.com/toxtli/alexnet-cifar-10-keras-jupyter/blob/master/alexnet_test1.ipynb) |
+| LeNet5      | Fashion-MNIST        | [fashion_mnist_keras](https://colab.research.google.com/github/margaretmz/deep-learning/blob/master/fashion_mnist_keras.ipynb) |
+| LeNet5      | MNIST                | [lenet5-mnist](https://github.com/lucaaslb/lenet5-mnist)     |
+| LSTM-1      | Sine-Wave            | [LSTM-Neural-Network-for-Time-Series-Prediction](https://github.com/StevenZxy/CIS400/tree/f69489c0624157ae86b5d8ddb1fa99c89a927256/code/LSTM-Neural-Network-for-Time-Series-Prediction-master) |
+| LSTM-2      | Stock-Price          | [StockPricesPredictionProject](https://github.com/omerbsezer/LSTM_RNN_Tutorials_with_Demo/tree/master/StockPricesPredictionProject) |
+| ResNet50    | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+| MobileNetV1 | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+| InceptionV3 | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+| DenseNet121 | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+| VGG16       | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+| VGG19       | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+| Xception    | ImageNet<sup>2</sup> | Keras applications tutorial<sup>3</sup>                      |
+
+1:  The first 5 models are trained using existing repositories while the last 7 models in ImageNet are obtained directly using the API provided by Keras.
+
+2: We sampled 1500 images from ImageNet and you could obtain them from `sampled_imagenet-1500.npz`. You can also sample your own images from the [ImageNet validation dataset](http://www.image-net.org/challenges/LSVRC/2012/nonpub-downloads). 
+
+3: Keras applications tutorial can be found in: https://keras.io/api/applications/
+
+4: All model files, the two regression datasets, and the ImageNet dataset we sampled can be accessed in [OneDrive](https://1drv.ms/u/s!Aj6dGBsJFcs0jnXVUfAtsEjdUW_T?e=ezo32C)
+
+**NOTE**: LEMON uses 6 datasets, and 3 of them can be directly obtained from their homepages ( [CIFAR-10](http://www.cs.toronto.edu/~kriz/cifar.html),  [Fashion-MNIST](https://github.com/zalandoresearch/fashion-mnist), [MNIST](http://yann.lecun.com/exdb/mnist/) ) or accessed by [Keras API](https://keras.io/api/datasets/). We only upload the ImageNet dataset sampled by LEMON and two datasets collected from GitHub.
+
+In `dataset.zip`, there are 3 files:
+
+> sinewave.csv: dataset of LSTM-1
+> DIS.csv : dataset of LSTM-2
+> sampled_imagenet-1500.npz: dataset sampled by LEMON from ImageNet
+
+In `origin_model.zip`, there are 12 models used in LEMON. 
+
+
+### Libraries
+
+We used `20` release versions of `4` widely-used DL `libraries`, i.e., `TensorFlow`, `CNTK`, `Theano`, and `MXNet`, as subjects to construct five experiments (indexed `E1` to `E5` in the table below) for differential testing.
+
+We share the link of each library and docker image used in `LEMON`. 
+
+| Experiment ID | Tensorflow                                                | Theano                                          | CNTK                                                         | MXNet                                                      | CUDA                                                         |
+| ------------- | --------------------------------------------------------- | ----------------------------------------------- | ------------------------------------------------------------ | ---------------------------------------------------------- | ------------------------------------------------------------ |
+| E1            | [1.14.0](https://pypi.org/project/tensorflow-gpu/1.14.0/) | [1.0.4](https://pypi.org/project/Theano/1.0.4/) | [2.7.0](https://pypi.org/project/cntk-gpu/2.7/)              | [1.5.1](https://pypi.org/project/mxnet-cu101/1.5.1.post0/) | [10.1](https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=10.1-cudnn7-devel-ubuntu16.04) |
+| E2            | [1.13.1](https://pypi.org/project/tensorflow-gpu/1.13.1/) | [1.0.3](https://pypi.org/project/Theano/1.0.3/) | [2.6.0](https://pypi.org/project/cntk-gpu/2.6/)              | [1.4.1](https://pypi.org/project/mxnet-cu100/1.4.1/)       | [10.0](https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=10.0-cudnn7-devel-ubuntu16.04) |
+| E3            | [1.12.0](https://pypi.org/project/tensorflow-gpu/1.12.0/) | [1.0.2](https://pypi.org/project/Theano/1.0.2/) | [2.5.1](https://pypi.org/project/cntk-gpu/2.5.1/)            | [1.3.1](https://pypi.org/project/mxnet-cu90/1.3.1/)        | [9.0](https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=9.0-cudnn7-devel-ubuntu16.04) |
+| E4            | [1.11.0](https://pypi.org/project/tensorflow-gpu/1.11.0/) | [1.0.1](https://pypi.org/project/Theano/1.0.1/) | [2.4.0](https://docs.microsoft.com/en-us/cognitive-toolkit/Setup-Linux-Python?tabs=cntkpy24) | [1.2.1](https://pypi.org/project/mxnet-cu90/1.2.1.post1/)  | [9.0](https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=9.0-cudnn7-devel-ubuntu16.04) |
+| E5            | [1.10.0](https://pypi.org/project/tensorflow-gpu/1.10.0/) | [1.0.0](https://pypi.org/project/Theano/1.0.0/) | [2.3.1](https://docs.microsoft.com/en-us/cognitive-toolkit/Setup-Linux-Python?tabs=cntkpy231) | [1.1.0](https://pypi.org/project/mxnet-cu90/1.1.0/)        | [9.0](https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=9.0-cudnn7-devel-ubuntu16.04) |
+
+\* All libraries should be  `GPU-supported` version
+
+## Reproducibility
+
+### Environment 
+
+We conducted 5 experiments in `LEMON` of which the library and CUDA version information are as described above. In order to facilitate other researchers to reproduce `LEMON`, we provide a `docker`  image for the `E1` experiment. It can be easily obtained by the following command. (**Note: nvidia-docker2 is required!**) 
+
+**Step 0:** Please Install [nvidia-docker2](https://github.com/NVIDIA/nvidia-docker). You can use this [instruction](https://codepyre.com/2019/01/installing-nvidia-docker2-on-ubuntu-18.0.4/) to install it.
+
+**Step 1:** Clone the repository. Download the dataset and models from  [OneDrive](https://1drv.ms/u/s!Aj6dGBsJFcs0jnXVUfAtsEjdUW_T?e=ezo32C). Save the code and unzip datasets and models to `/your/local/path/` , e.g. `/your/local/path/origin_model` and `/your/local/path/dataset`. (`/your/local/path/` should be the absolute path on your server, e.g. `/home/user_xxx/`)
+
+**Step 2:** Using the following command to pull the docker image we released for `E1` and create a container for it. 
+
+```shell
+docker pull yenming1227/lemon:latest
+docker run --runtime=nvidia -it -v /your/local/path/:/data  yenming1227/lemon:latest /bin/bash
+```
+
+Then you will enter a container. (Remember to change `/your/local/path/` to the real path! ^_^)
+
+**Note: If your server is using http proxy, you should configure proxy in the container just as you did in your server before**
+
+**Step 3:** Create five virtual environments as shown below in your docker container.
+
+**Note: Please copy the installation command line by line to prevent some commands from being skipped.**
+
+ ```shell
+# tensorflow
+conda create -n tensorflow python=3.6
+source activate tensorflow
+pip install -r lemon_requirements.txt
+pip install keras==2.2.4
+pip install tensorflow-gpu==1.14.0
+source deactivate
+
+# theano
+conda create -n theano python=3.6
+source activate theano
+pip install -r lemon_requirements.txt
+conda install pygpu=0.7.6
+pip install keras==2.2.4
+pip install theano==1.0.4
+source deactivate
+
+# cntk
+conda create -n cntk python=3.6
+source activate cntk
+pip install -r lemon_requirements.txt
+pip install keras==2.2.4
+pip install cntk-gpu==2.7
+source deactivate
+
+# mxnet
+conda create -n mxnet python=3.6
+source activate mxnet
+pip install -r lemon_requirements.txt
+pip install keras-mxnet==2.2.4.2
+pip install mxnet-cu101==1.5.1.post0
+source deactivate
+
+# default lemon python
+conda create -n lemon python=3.6
+source activate lemon
+pip install -r lemon_requirements.txt
+pip install keras==2.2.4
+pip install tensorflow-gpu==1.14.0
+source deactivate
+ ```
+
+### Redis Startup
+
+LEMON uses redis to store intermediate outputs and exchange data between different processes. We have installed redis in our docker image, you can start it with the following command:
+
+```bash
+cd /root/redis-4.0.8/src
+./redis-server ../redis.conf
+```
+
+### Running LEMON
+
+The `LEMON` artifacts are well organized, and researchers can simply run `LEMON` with the following command. 
+
+**Note: We conducted five large scale experiments (generating 100 mutants for each of the 12 initial models, analyzing inconsistencies on 1500 inputs, and locating bugs) and you can reproduce the bugs reported in `LEMON` by running the tool with `experiments.conf`. However, it cannot be completed within `48` hours. Therefore, we provide a `demo run`, which can be completed within `1` hour if you run the tool with `demo.conf`.**
+
+```shell
+cd /LEMON
+git pull 
+source activate lemon
+```
+
+We really recommend you use `git pull` to update `LEMON` to the latest version.
+
+**Mutation:**
+
+```shell
+python -u -m run.mutation_executor demo.conf
+```
+
+The above command shows how to generate mutants and calculate inconsistencies in `LEMON`. `demo.conf` is the configuration file we provide for the `demo run`.
+
+**Localization:**
+
+```shell
+python -u -m run.localization_executor demo.conf
+```
+
+This command shows the way to perform localization in `LEMON`. The final bug reports will be stored in the path `/data/lemon_outputs/bug_list.txt`.
+
+### Extension
+
+`LEMON` also supports researchers to switch to other models and datasets. You only need to focus on the code snippets of the data processing part in `DataUtils.get_data_by_exp` in `scripts/tools/utils.py`.
+
+```
+# TODO: Add your own data preprocessing here
+# Note: The returned inputs should be preprocessed and labels should decoded as one-hot vectors which could be directly feed in model. Both of them should be returned in batch, e.g. shape like (1500,28,28,1) and (1500,10)
+# 
+# elif 'xxx' in exp:
+#     x_test, y_test = get_your_data(dataset_dir)
+```
+
+Besides, you should name your model file in the format `NetworkName-DatasetName_origin.h5`, e.g. `mobilenet.1.00.224-imagenet_origin.h5`.
+
+Note: `_` and `-` cannot appear in `NetworkName`. You can replace them with `.`
+
+For example , changing  `mobilenet_1.00_224-imagenet_origin.h5` to `mobilenet.1.00.224-imagenet_origin.h5`. 
+
+## Citation
+
+Please cite our paper if this work is helpful to you.
+```
+@inproceedings{DBLP:conf/sigsoft/WangYCLZ20,
+  author    = {Zan Wang and
+               Ming Yan and
+               Junjie Chen and
+               Shuang Liu and
+               Dongdi Zhang},
+  title     = {Deep learning library testing via effective model generation},
+  booktitle = {{ESEC/SIGSOFT} {FSE}},
+  pages     = {788--799},
+  publisher = {{ACM}},
+  year      = {2020}
+}
+```
+
+## Contact
+
+Authors information:
+
+| Name          | Email Address          | **Github id** |
+| ------------- | ---------------------- | ------------- |
+| Zan Wang      | wangzan@tju.edu.cn     | tjuwangzan    |
+| Ming Yan      | yanming@tju.edu.cn     | Jacob-yen     |
+| Junjie Chen * | junjiechen@tju.edu.cn  | JunjieChen    |
+| Shuang Liu    | shuang.liu@tju.edu.cn  | AbigailLiu    |
+| Dongdi Zhang  | zhangdongdi@tju.edu.cn | Dandy-John    |
+
+\* *corresponding author*
+
+

+ 47 - 0
server/LEMON-master/config/demo.conf

@@ -0,0 +1,47 @@
+[parameters]
+mutate_ops=WS GF NEB NAI NS ARem ARep LA LC LR LS MLA
+metrics=D_MAD
+#metrics=deepgini
+exps=mobilenet.1.00.224-imagenet
+# lexnet-cifar10 lenet5-fashion-mnist fashion2 svhn  lenet5-mnist alexnet-cifar10 mobilenet.1.00.224-imagenet vgg16-imagenet
+# Path of the initial models
+# Name model file as 'alexnet-cifar10_origin.h5'
+
+origin_model_dir=origin_model
+
+# Path of the ImageNet and regression dataset
+dataset_dir=dataset
+
+# Modifying the backends is not recommended.
+# There is some hard-code in the program about the backends
+backend=tensorflow mxnet
+#..python_prefix = /root/anaconda3/envs/
+python_prefix=/opt/conda/envs/
+
+output_dir = lemon_outputs
+mutate_num=20 
+
+test_size=10
+pool_size=50
+mutate_ratio=0.3
+gpu_ids = 0,1
+threshold = 0.4
+# minutes
+time_limit = 3
+# use MCMC for mutator selection
+mutator_strategy = MCMC
+
+# use Roulette for mutant selection
+mutant_strategy = Roulette
+
+# use counter,timing
+stop_mode=counter
+
+
+[redis]
+# your-redis-server
+host= 127.0.0.1
+# redis port
+port= 6379
+# db number
+redis_db= 0

+ 43 - 0
server/LEMON-master/config/experiments.conf

@@ -0,0 +1,43 @@
+[parameters]
+mutate_ops=WS GF NEB NAI NS ARem ARep LA LC LR LS MLA
+metrics=D_MAD
+exps=alexnet-cifar10 xception-imagenet lenet5-fashion-mnist lenet5-mnist resnet50-imagenet vgg16-imagenet vgg19-imagenet densenet121-imagenet mobilenet.1.00.224-imagenet inception.v3-imagenet lstm0-sinewave lstm2-price
+
+# Path of the initial models
+# Name model file as 'alexnet-cifar10_origin.h5'
+origin_model_dir=/data/origin_model
+
+# Path of the ImageNet and regression dataset
+dataset_dir=/data/dataset
+
+# Modifying the backends is not recommended.
+# There is some hard-code in the program about the backends
+backend=tensorflow theano cntk mxnet
+python_prefix = /root/anaconda3/envs/
+output_dir = /data/lemon_outputs
+mutate_num=2
+test_size=10
+pool_size=50
+mutate_ratio=0.3
+gpu_ids = 0,1
+threshold = 0.4
+# minutes
+time_limit = 60
+# use MCMC for mutator selection
+mutator_strategy = MCMC
+
+# use Roulette for mutant selection
+mutant_strategy = Roulette
+
+# use counter,timing
+stop_mode=timing
+
+
+[redis]
+# your-redis-server
+host= 127.0.0.1
+# redis port
+port= 6379 
+# db number
+redis_db= 0 
+

+ 5035 - 0
server/LEMON-master/dataset/DIS.csv

@@ -0,0 +1,5035 @@
+1996-12-31,23.011177,23.175543,22.805719,22.928993,17.804647,2719200
+1997-01-02,22.928993,22.970085,21.942801,22.148258,17.198397,8115400
+1997-01-03,22.394806,22.805719,22.394806,22.764629,17.677013,4650000
+1997-01-06,22.764629,22.928993,22.559172,22.682446,17.613201,3623300
+1997-01-07,22.682446,22.682446,22.435898,22.641354,17.581289,3606200
+1997-01-08,22.641354,22.805719,22.189348,22.312624,17.353743,3225400
+1997-01-09,22.312624,22.600264,22.107166,22.189348,17.257866,4606500
+1997-01-10,22.189348,22.312624,21.819527,22.312624,17.353743,5278700
+1997-01-13,22.312624,22.518080,22.312624,22.353714,17.385700,4653300
+1997-01-14,22.353714,22.846811,22.353714,22.764629,17.705294,4760400
+1997-01-15,22.764629,22.928993,22.476990,22.723537,17.673336,5413500
+1997-01-16,22.723537,22.805719,22.518080,22.641354,17.609415,4509700
+1997-01-17,22.641354,23.052269,22.641354,23.011177,17.897047,5574100
+1997-01-20,23.093359,23.298817,23.093359,23.175543,18.024878,4459500
+1997-01-21,22.970085,22.970085,22.764629,22.928993,17.833128,4988200
+1997-01-22,22.928993,23.298817,22.764629,23.257725,18.088799,5206000
+1997-01-23,23.257725,23.545364,23.134451,23.134451,17.992920,8147900
+1997-01-24,23.134451,23.504274,22.846811,23.422091,18.216633,4667900
+1997-01-27,23.422091,23.627548,23.216633,23.545364,18.312508,4942900
+1997-01-28,23.791914,24.572649,23.791914,23.874096,18.568186,8129700
+1997-01-29,23.915188,24.243919,23.915188,24.243919,18.855822,6218700
+1997-01-30,24.243919,24.367193,23.709730,23.791914,18.504267,4288900
+1997-01-31,23.874096,24.120644,23.874096,23.956278,18.632095,2808300
+1997-02-03,23.956278,23.956278,23.668638,23.956278,18.632095,3429500
+1997-02-04,23.956278,23.997370,23.833004,23.874096,18.568186,2283300
+1997-02-05,23.874096,24.408283,23.709730,23.915188,18.600145,4936500
+1997-02-06,23.997370,24.367193,23.997370,24.326101,18.919729,5348100
+1997-02-07,24.408283,24.778107,24.408283,24.654833,19.175407,4493900
+1997-02-10,24.860289,25.312294,24.860289,24.860289,19.335197,5934000
+1997-02-11,24.860289,24.983564,24.531559,24.654833,19.175407,3703300
+1997-02-12,24.695923,25.271204,24.695923,25.189020,19.590878,3350700
+1997-02-13,25.189020,25.928665,25.147928,25.435568,19.782625,5873400
+1997-02-14,25.517752,26.051939,25.517752,25.682117,19.974382,5753000
+1997-02-18,25.641026,25.641026,25.230112,25.476660,19.814587,4179400
+1997-02-19,25.394478,25.394478,24.983564,25.024654,19.463034,3628100
+1997-02-20,25.024654,25.147928,24.490467,24.572649,19.111486,3057200
+1997-02-21,24.654833,25.024654,24.654833,24.778107,19.271286,3788800
+1997-02-24,24.778107,25.024654,24.490467,24.860289,19.335197,2937600
+1997-02-25,24.860289,25.271204,24.654833,25.024654,19.463034,3177000
+1997-02-26,25.024654,25.106838,24.161736,24.819199,19.303246,4452200
+1997-02-27,24.819199,25.024654,24.819199,24.942472,19.399118,3158500
+1997-02-28,24.737015,24.737015,24.408283,24.408283,18.983654,3166400
+1997-03-03,24.408283,24.613741,23.915188,24.120644,18.759937,4592500
+1997-03-04,24.120644,24.161736,23.833004,23.915188,18.600145,4237800
+1997-03-05,23.915188,24.367193,23.586456,24.161736,18.791895,4667900
+1997-03-06,24.161736,24.819199,24.120644,24.695923,19.207361,4664900
+1997-03-07,24.778107,25.106838,24.778107,24.983564,19.431084,4805100
+1997-03-10,24.983564,25.312294,24.737015,25.230112,19.622831,2586900
+1997-03-11,25.230112,25.517752,25.230112,25.353386,19.718708,3055300
+1997-03-12,25.312294,25.312294,24.695923,24.778107,19.271286,3587700
+1997-03-13,24.737015,24.737015,24.079554,24.079554,18.727982,3148100
+1997-03-14,24.202827,24.737015,24.202827,24.572649,19.111486,4354000
+1997-03-17,24.572649,25.065746,24.367193,24.983564,19.431084,3429800
+1997-03-18,24.983564,25.147928,24.613741,24.819199,19.303246,3160300
+1997-03-19,24.737015,24.737015,24.326101,24.654833,19.175407,2907800
+1997-03-20,24.654833,24.695923,24.367193,24.490467,19.047569,2585300
+1997-03-21,24.490467,24.531559,24.243919,24.326101,18.919729,4190600
+1997-03-24,24.326101,24.695923,24.285009,24.654833,19.175407,3819200
+1997-03-25,24.654833,24.819199,24.490467,24.572649,19.111486,3843200
+1997-03-26,24.572649,24.860289,24.531559,24.737015,19.239321,3139000
+1997-03-27,24.737015,24.860289,24.079554,24.654833,19.175407,4020300
+1997-03-31,24.326101,24.326101,23.750822,23.956278,18.632095,5099900
+1997-04-01,23.956278,24.161736,23.339909,23.791914,18.504267,4324500
+1997-04-02,23.791914,24.285009,23.380999,23.791914,18.504267,4246600
+1997-04-03,23.791914,23.915188,23.422091,23.874096,18.568186,3402700
+1997-04-04,23.874096,24.243919,23.627548,24.243919,18.855822,3583100
+1997-04-07,24.243919,24.367193,24.120644,24.243919,18.855822,2001900
+1997-04-08,24.243919,24.285009,23.997370,24.285009,18.887777,2165500
+1997-04-09,24.285009,24.367193,24.120644,24.243919,18.889563,2186200
+1997-04-10,24.285009,24.695923,24.285009,24.326101,18.953592,4065300
+1997-04-11,24.326101,24.531559,23.791914,23.791914,18.537388,5240400
+1997-04-14,23.709730,23.709730,23.134451,23.380999,18.217224,4348500
+1997-04-15,23.422091,24.367193,23.422091,24.285009,18.921579,3651300
+1997-04-16,24.285009,24.860289,24.120644,24.860289,19.369812,5378200
+1997-04-17,24.860289,25.106838,24.737015,24.901381,19.401827,3389000
+1997-04-18,24.901381,25.312294,24.819199,25.230112,19.657955,3421000
+1997-04-21,25.230112,25.599934,25.065746,25.271204,19.689972,4291300
+1997-04-22,25.271204,25.928665,25.106838,25.723207,20.042145,6545100
+1997-04-23,25.723207,26.175213,25.723207,26.010849,20.266262,6826200
+1997-04-24,26.010849,26.257397,25.558844,25.723207,20.042145,4222900
+1997-04-25,25.723207,25.723207,25.353386,25.435568,19.818035,3440800
+1997-04-28,25.435568,25.723207,25.394478,25.517752,19.882074,3100700
+1997-04-29,25.723207,26.586128,25.723207,26.545036,20.682470,6762600
+1997-04-30,26.545036,27.120316,26.298489,26.873768,20.938601,7079300
+1997-05-01,26.873768,26.955950,26.380671,26.627218,20.746506,3429800
+1997-05-02,26.627218,26.791584,26.421762,26.421762,20.586424,3554500
+1997-05-05,26.421762,27.038134,26.216305,26.997042,21.034653,4317800
+1997-05-06,26.997042,27.079224,26.750494,27.038134,21.066669,4446700
+1997-05-07,27.038134,27.038134,26.380671,26.462852,20.618444,3492800
+1997-05-08,26.462852,26.914858,26.216305,26.709402,20.810539,4810600
+1997-05-09,26.709402,27.038134,26.627218,27.038134,21.066669,3525300
+1997-05-12,27.079224,27.983234,27.079224,27.777779,21.642962,4750000
+1997-05-13,27.736687,27.736687,27.243589,27.490139,21.418844,3574300
+1997-05-14,27.490139,27.818869,27.407955,27.490139,21.418844,2978100
+1997-05-15,27.407955,27.407955,27.161407,27.243589,21.226746,2607200
+1997-05-16,27.161407,27.161407,26.586128,26.586128,20.714487,5604200
+1997-05-19,26.586128,27.407955,26.545036,27.366863,21.322796,2888300
+1997-05-20,27.366863,27.490139,26.832676,27.449047,21.386831,4024200
+1997-05-21,27.449047,27.531229,26.914858,26.997042,21.034653,3175800
+1997-05-22,26.997042,27.202497,26.873768,26.997042,21.034653,2779400
+1997-05-23,27.079224,27.490139,27.079224,27.366863,21.322796,1490800
+1997-05-27,27.366863,27.572321,26.997042,27.531229,21.450869,2683900
+1997-05-28,27.531229,27.654503,27.202497,27.572321,21.482876,2556400
+1997-05-29,27.572321,27.613413,27.079224,27.243589,21.226746,2662900
+1997-05-30,27.038134,27.038134,26.339579,26.914858,20.970621,3864800
+1997-06-02,26.914858,27.079224,26.545036,26.586128,20.714487,2556800
+1997-06-03,26.586128,26.627218,26.421762,26.503944,20.650459,2660500
+1997-06-04,26.503944,26.503944,26.257397,26.257397,20.458361,2701600
+1997-06-05,26.257397,26.832676,26.257397,26.545036,20.682470,2982000
+1997-06-06,26.545036,26.791584,26.339579,26.750494,20.842552,3875500
+1997-06-09,26.750494,26.955950,26.586128,26.627218,20.746506,3240900
+1997-06-10,26.627218,26.955950,26.627218,26.914858,20.970621,3696900
+1997-06-11,26.914858,26.914858,26.586128,26.750494,20.842552,4060700
+1997-06-12,26.750494,26.914858,26.709402,26.832676,20.906586,5036600
+1997-06-13,26.914858,27.613413,26.914858,27.449047,21.386831,5766700
+1997-06-16,27.449047,27.572321,27.284681,27.572321,21.482876,3999600
+1997-06-17,27.572321,27.736687,27.202497,27.613413,21.514900,4468300
+1997-06-18,27.407955,27.407955,27.120316,27.120316,21.130701,3320000
+1997-06-19,27.120316,27.490139,26.750494,27.407955,21.354816,4884800
+1997-06-20,27.407955,27.407955,26.955950,27.038134,21.066669,5484100
+1997-06-23,27.038134,27.079224,26.462852,26.503944,20.650459,2749900
+1997-06-24,26.503944,27.120316,26.462852,27.120316,21.130701,4375900
+1997-06-25,27.120316,27.336029,26.586128,26.955950,21.002640,4961100
+1997-06-26,26.955950,27.038134,26.627218,26.812130,20.890577,4116400
+1997-06-27,26.812130,26.935404,26.257397,26.277943,20.474367,5498400
+1997-06-30,26.277943,26.442308,25.887573,26.380671,20.554409,7595500
+1997-07-01,26.216305,26.216305,25.599934,25.825937,20.122194,6409100
+1997-07-02,25.558844,25.558844,25.127384,25.271204,19.689972,9029200
+1997-07-03,25.353386,25.805391,25.353386,25.394478,19.786020,5407700
+1997-07-07,25.394478,25.723207,24.675379,24.983564,19.465862,10316600
+1997-07-08,25.271204,25.764299,25.271204,25.661572,19.994125,8744200
+1997-07-09,25.661572,25.805391,25.250658,25.332840,19.771429,6603500
+1997-07-10,25.332840,25.620480,25.189020,25.271204,19.723328,5095600
+1997-07-11,25.271204,25.312294,24.901381,25.127384,19.611069,5631000
+1997-07-14,25.127384,25.517752,25.065746,25.435568,19.851603,5911800
+1997-07-15,25.435568,25.620480,25.168474,25.620480,19.995916,4937700
+1997-07-16,25.661572,26.134123,25.661572,25.928665,20.236450,6325500
+1997-07-17,25.928665,25.928665,25.106838,25.558844,19.947811,6023400
+1997-07-18,25.312294,25.312294,24.737015,24.942472,19.466759,7225000
+1997-07-21,24.942472,24.963018,24.264463,24.716469,19.290367,4851000
+1997-07-22,24.737015,25.641026,24.737015,25.517752,19.915741,8507800
+1997-07-23,25.558844,26.175213,25.558844,25.620480,19.995916,6438300
+1997-07-24,25.620480,26.175213,25.147928,26.072485,20.348690,5299400
+1997-07-25,26.051939,26.051939,25.723207,25.969757,20.268520,4235900
+1997-07-28,25.969757,26.236851,25.969757,26.175213,20.428869,3070800
+1997-07-29,26.175213,26.442308,26.134123,26.339579,20.557152,4558100
+1997-07-30,26.442308,26.709402,26.442308,26.606674,20.765612,4818200
+1997-07-31,26.606674,26.853222,26.565582,26.565582,20.733541,3814000
+1997-08-01,26.565582,26.729948,26.380671,26.483398,20.669399,4256900
+1997-08-04,26.483398,26.750494,26.236851,26.606674,20.765612,3811300
+1997-08-05,26.606674,26.627218,26.236851,26.277943,20.509045,3747100
+1997-08-06,26.277943,26.627218,26.175213,26.565582,20.733541,3502500
+1997-08-07,26.565582,26.709402,25.949211,26.010849,20.300585,3525300
+1997-08-08,26.010849,26.113577,25.682117,26.031393,20.316620,3950300
+1997-08-11,26.031393,26.360125,25.517752,26.175213,20.428869,3982800
+1997-08-12,26.175213,26.421762,25.723207,25.764299,20.108166,5537900
+1997-08-13,25.764299,26.277943,25.435568,25.969757,20.268520,5513000
+1997-08-14,25.969757,26.319033,25.517752,25.928665,20.236450,4677000
+1997-08-15,25.846483,25.846483,25.147928,25.189020,19.659176,5088600
+1997-08-18,25.189020,25.908119,24.983564,25.846483,20.172308,5881700
+1997-08-19,25.846483,26.195759,25.641026,26.195759,20.444902,7438200
+1997-08-20,26.195759,26.503944,26.051939,26.421762,20.621296,4050100
+1997-08-21,26.421762,26.442308,25.784845,25.928665,20.236450,3044700
+1997-08-22,25.743753,25.743753,25.373932,25.682117,20.044024,3647600
+1997-08-25,25.682117,25.990303,25.599934,25.743753,20.092133,2708200
+1997-08-26,25.743753,25.867029,25.476660,25.476660,19.883671,2963200
+1997-08-27,25.476660,25.969757,25.271204,25.805391,20.140238,3469700
+1997-08-28,25.805391,25.928665,25.353386,25.558844,19.947811,4221000
+1997-08-29,25.558844,25.682117,25.250658,25.250658,19.707283,2806500
+1997-09-02,25.312294,25.969757,25.312294,25.969757,20.268520,2739300
+1997-09-03,25.969757,26.339579,25.969757,26.154669,20.412836,3063900
+1997-09-04,26.134123,26.134123,25.415024,25.538298,19.931778,5254100
+1997-09-05,25.538298,25.846483,25.476660,25.702663,20.060059,3473600
+1997-09-08,25.702663,26.031393,25.599934,25.599934,19.979887,2294200
+1997-09-09,25.599934,26.010849,25.456114,25.805391,20.140238,2804700
+1997-09-10,25.784845,25.784845,25.538298,25.558844,19.947811,2231000
+1997-09-11,25.271204,25.271204,24.469921,24.901381,19.434690,7274900
+1997-09-12,24.942472,25.415024,24.942472,25.353386,19.787464,4720500
+1997-09-15,25.353386,25.497206,25.230112,25.373932,19.803493,2394600
+1997-09-16,25.435568,26.093031,25.435568,25.990303,20.284554,3703600
+1997-09-17,25.990303,26.380671,25.805391,25.928665,20.236450,3701800
+1997-09-18,25.928665,26.236851,25.558844,26.072485,20.348690,5335900
+1997-09-19,26.072485,26.277943,26.051939,26.277943,20.509045,3995600
+1997-09-22,26.277943,26.360125,26.134123,26.154669,20.412836,2927000
+1997-09-23,26.154669,26.195759,25.846483,25.969757,20.268520,2722500
+1997-09-24,25.969757,26.277943,25.867029,25.867029,20.188345,3053800
+1997-09-25,25.867029,26.175213,25.723207,26.175213,20.428869,3135900
+1997-09-26,26.175213,26.360125,26.031393,26.134123,20.396801,3346500
+1997-09-29,26.134123,26.627218,26.010849,26.298489,20.525084,3188600
+1997-09-30,26.298489,26.771038,26.216305,26.503944,20.685432,3769000
+1997-10-01,26.565582,27.079224,26.565582,26.955950,21.038208,4255400
+1997-10-02,27.038134,27.592867,27.038134,27.551775,21.503229,5180200
+1997-10-03,27.613413,28.209238,27.613413,27.736687,21.647551,5642300
+1997-10-06,27.736687,27.777779,27.202497,27.284681,21.294773,2672000
+1997-10-07,27.366863,28.085962,27.366863,28.003780,21.855999,3330000
+1997-10-08,27.921598,27.921598,27.325773,27.613413,21.551332,4423600
+1997-10-09,27.613413,27.757233,27.387409,27.675049,21.599442,2396700
+1997-10-10,27.633959,27.633959,27.366863,27.510683,21.471159,2805600
+1997-10-13,27.654503,27.901052,27.654503,27.818869,21.711687,1503600
+1997-10-14,27.901052,28.558514,27.901052,28.414694,22.176712,3747400
+1997-10-15,28.414694,28.866699,28.188692,28.558514,22.323038,5331400
+1997-10-16,28.517424,28.517424,27.469593,27.921598,21.825191,4013900
+1997-10-17,27.777779,27.777779,26.729948,27.161407,21.230982,5957700
+1997-10-20,27.161407,27.490139,27.120316,27.469593,21.471876,3231200
+1997-10-21,27.469593,27.654503,27.305227,27.510683,21.503998,2879500
+1997-10-22,27.510683,27.736687,27.284681,27.305227,21.343399,3162700
+1997-10-23,27.264135,27.264135,26.832676,27.058678,21.150684,4010800
+1997-10-24,27.058678,27.901052,26.997042,27.079224,21.166742,6648800
+1997-10-27,27.079224,27.223043,25.558844,25.558844,19.978327,5425700
+1997-10-28,25.558844,27.202497,24.654833,27.058678,21.150684,9143900
+1997-10-29,27.058678,27.531229,26.709402,27.264135,21.311279,5073700
+1997-10-30,27.264135,27.325773,26.462852,26.503944,20.717072,3697500
+1997-10-31,26.853222,27.366863,26.853222,27.079224,21.166742,3521400
+1997-11-03,27.346317,28.106508,27.346317,28.106508,21.969728,3678900
+1997-11-04,28.106508,28.373604,27.983234,28.353058,22.162449,3481800
+1997-11-05,28.353058,28.373604,27.921598,27.983234,21.873371,4063100
+1997-11-06,27.983234,28.517424,27.777779,28.394148,22.194565,4229900
+1997-11-07,28.394148,28.763971,27.777779,28.373604,22.178505,4373700
+1997-11-10,28.373604,28.681787,28.065418,28.085962,21.953667,2133600
+1997-11-11,28.106508,28.496878,28.106508,28.270874,22.098206,1772200
+1997-11-12,28.270874,28.373604,27.305227,27.305227,21.343399,4388900
+1997-11-13,27.551775,28.044872,27.551775,27.839415,21.760958,4402600
+1997-11-14,27.983234,28.763971,27.983234,28.496878,22.274862,4858000
+1997-11-17,28.887245,29.503616,28.887245,29.257069,22.869080,5062400
+1997-11-18,29.257069,29.688528,28.846153,29.339251,22.933313,6419200
+1997-11-19,29.380342,30.037804,29.380342,30.037804,23.479343,4947500
+1997-11-20,30.037804,30.119987,29.750164,29.811802,23.302692,7153500
+1997-11-21,29.914530,31.147272,29.914530,31.147272,24.346569,7606200
+1997-11-24,30.962360,30.962360,30.428173,30.777449,24.057491,7187000
+1997-11-25,30.777449,30.982906,30.530901,30.777449,24.057491,4218900
+1997-11-26,30.880178,31.352728,30.880178,31.065088,24.282331,3984400
+1997-11-28,31.106180,31.332182,31.106180,31.208908,24.394751,1665700
+1997-12-01,31.208908,32.195103,30.941814,32.092373,25.085323,3758900
+1997-12-02,32.092373,32.133465,31.455458,31.743097,24.812304,4487800
+1997-12-03,31.743097,31.804733,30.962360,31.291092,24.458992,3557300
+1997-12-04,30.880178,30.880178,30.654175,30.756903,24.041437,7151400
+1997-12-05,30.756903,31.044542,30.571993,30.900723,24.153852,5479800
+1997-12-08,30.900723,31.126726,30.880178,31.023998,24.250212,3344600
+1997-12-09,31.023998,31.332182,30.715813,31.003452,24.234156,3793600
+1997-12-10,30.900723,30.900723,30.654175,30.839087,24.105679,2878600
+1997-12-11,30.654175,30.654175,30.263807,30.510355,23.848717,3465100
+1997-12-12,30.530901,31.085634,30.530901,30.777449,24.057491,3890400
+1997-12-15,30.962360,31.989645,30.962360,31.928007,24.956839,5753900
+1997-12-16,31.928007,31.928007,31.455458,31.558186,24.667768,4319300
+1997-12-17,31.660913,31.866371,31.660913,31.825279,24.876543,2828100
+1997-12-18,31.825279,32.154011,31.640368,31.989645,25.005018,4192100
+1997-12-19,31.681459,31.681459,30.654175,31.496548,24.619583,7934700
+1997-12-22,31.496548,32.195103,31.476002,32.010189,25.021078,3344300
+1997-12-23,32.030735,32.380013,32.030735,32.051281,25.053204,4218000
+1997-12-24,32.051281,32.112919,31.167818,31.229454,24.410812,1665700
+1997-12-26,31.229454,31.619822,31.188362,31.250000,24.426868,655200
+1997-12-29,31.393820,32.030735,31.393820,31.886917,24.924717,2310300
+1997-12-30,32.215649,32.955292,32.215649,32.544380,25.438633,6088500
+1997-12-31,32.544380,32.955292,32.441650,32.544380,25.438633,4096900
+1998-01-02,32.544380,32.811474,32.421104,32.749836,25.599234,2965600
+1998-01-05,32.749836,33.119659,32.112919,32.482742,25.390455,11894800
+1998-01-06,32.441650,32.441650,32.215649,32.359467,25.294098,4303500
+1998-01-07,32.359467,32.380013,32.010189,32.380013,25.344141,4472000
+1998-01-08,32.359467,32.359467,31.866371,32.112919,25.135082,2826900
+1998-01-09,32.112919,32.133465,31.558186,31.558186,24.700891,5429000
+1998-01-12,31.558186,31.599277,30.756903,31.044542,24.298853,6147200
+1998-01-13,31.044542,31.393820,30.818541,31.229454,24.443588,5070700
+1998-01-14,31.229454,31.928007,30.962360,31.722551,24.829540,3428000
+1998-01-15,31.578732,31.578732,31.126726,31.455458,24.620476,3033100
+1998-01-16,31.455458,31.743097,31.311638,31.599277,24.733047,4488700
+1998-01-20,31.804733,32.790928,31.804733,32.667652,25.569281,4064700
+1998-01-21,32.667652,32.708744,32.236195,32.503288,25.440630,3562400
+1998-01-22,32.421104,32.421104,31.660913,31.722551,24.829540,2696700
+1998-01-23,31.825279,32.606014,31.825279,32.421104,25.376301,5534900
+1998-01-26,32.421104,32.647106,31.886917,32.010189,25.054676,3097000
+1998-01-27,32.503288,34.229126,32.503288,33.777119,26.437664,9836000
+1998-01-28,34.023670,35.092045,34.023670,34.516766,27.016596,8960800
+1998-01-29,34.516766,35.626232,34.393490,34.927681,27.338223,5564700
+1998-01-30,34.927681,35.461868,34.619495,35.133137,27.499037,4329000
+1998-02-02,35.133137,35.913872,35.112591,35.770054,27.997561,5585100
+1998-02-03,35.585140,35.585140,35.092045,35.502960,27.788498,4228600
+1998-02-04,35.502960,35.749508,35.276955,35.605686,27.868904,3452000
+1998-02-05,35.605686,35.954964,35.318047,35.605686,27.868904,3539300
+1998-02-06,35.605686,35.872780,35.523506,35.687870,27.933229,2246800
+1998-02-09,35.687870,36.037148,35.359138,35.400230,27.708082,2616700
+1998-02-10,35.400230,35.872780,35.009861,35.667324,27.917143,3399700
+1998-02-11,35.667324,36.427513,35.605686,36.427513,28.512152,3926300
+1998-02-12,36.304241,36.304241,35.790600,36.201511,28.335255,4166000
+1998-02-13,36.201511,36.694611,35.934418,36.632973,28.672964,2932100
+1998-02-17,36.735699,37.146614,36.735699,37.146614,29.075005,4479300
+1998-02-18,37.146614,37.886257,36.858974,37.762985,29.557442,3645800
+1998-02-19,37.762985,37.968441,37.310978,37.536983,29.380547,3734000
+1998-02-20,37.536983,37.762985,37.228798,37.598618,29.428782,4215900
+1998-02-23,37.598618,38.050625,37.310978,37.495892,29.348383,4578500
+1998-02-24,37.454800,37.454800,36.674065,36.797337,28.801624,5078000
+1998-02-25,36.797337,36.858974,36.489151,36.571335,28.624729,4204900
+1998-02-26,36.530243,36.530243,35.831688,36.160419,28.303093,4426400
+1998-02-27,36.160419,37.023338,35.831688,36.797337,28.801624,4575400
+1998-03-02,36.797337,36.797337,35.831688,35.954964,28.142281,3736700
+1998-03-03,35.954964,36.386425,35.831688,36.160419,28.303093,3157900
+1998-03-04,35.153683,35.153683,34.414036,35.133137,27.499037,11663000
+1998-03-05,34.804405,34.804405,34.434582,34.578403,27.064846,6630900
+1998-03-06,34.578403,34.989315,34.557858,34.722221,27.177406,4022400
+1998-03-09,34.722221,34.989315,34.598949,34.783859,27.225657,3418500
+1998-03-10,34.783859,34.907135,34.681129,34.804405,27.241732,3103100
+1998-03-11,34.701675,34.701675,34.311310,34.455128,26.968346,3551800
+1998-03-12,34.455128,34.948223,34.372944,34.824951,27.257813,3297800
+1998-03-13,34.824951,35.461868,34.537312,34.845497,27.273893,3057200
+1998-03-16,34.907135,36.160419,34.907135,36.037148,28.206619,3836500
+1998-03-17,35.749508,35.749508,35.071499,35.400230,27.708082,3336700
+1998-03-18,35.338593,35.338593,34.845497,35.276955,27.611609,3048300
+1998-03-19,35.276955,35.400230,34.763313,34.948223,27.354300,3282000
+1998-03-20,34.948223,35.174229,34.640041,35.174229,27.531199,5122100
+1998-03-23,34.763313,34.763313,33.777119,33.859303,26.501991,7810300
+1998-03-24,34.393490,35.276955,34.393490,35.153683,27.515114,8031700
+1998-03-25,35.153683,35.297501,34.804405,35.133137,27.499037,3650700
+1998-03-26,35.133137,35.544048,34.886589,35.400230,27.708082,3245800
+1998-03-27,35.400230,35.564594,35.174229,35.256409,27.595518,3202900
+1998-03-30,35.256409,35.338593,34.681129,34.701675,27.161325,2790400
+1998-03-31,34.701675,35.338593,34.681129,35.092045,27.466869,3754400
+1998-04-01,35.092045,35.174229,34.496220,34.845497,27.273893,4573900
+1998-04-02,34.845497,35.811146,34.660583,35.790600,28.013636,4667300
+1998-04-03,35.790600,36.468605,35.544048,36.468605,28.544315,4345800
+1998-04-06,36.468605,36.550789,35.585140,35.626232,27.884987,3110100
+1998-04-07,35.626232,35.646778,35.194775,35.461868,27.796865,2457000
+1998-04-08,35.461868,35.502960,34.742767,35.174229,27.571396,3257000
+1998-04-09,35.174229,35.975510,35.009861,35.852234,28.102850,3758900
+1998-04-13,35.852234,36.283695,35.811146,36.242603,28.408836,3730700
+1998-04-14,36.653519,37.228798,36.653519,37.208252,29.165771,4876000
+1998-04-15,37.208252,37.742439,36.982250,37.331524,29.262398,6090900
+1998-04-16,37.290436,37.290436,36.797337,36.817883,28.859776,3992600
+1998-04-17,36.817883,37.290436,36.694611,37.269890,29.214083,4044600
+1998-04-20,37.290436,38.091717,37.290436,37.927349,29.729425,5386400
+1998-04-21,37.927349,38.440994,37.639709,38.379356,30.083740,4772200
+1998-04-22,38.379356,40.105194,38.030079,40.105194,31.436537,3832300
+1998-04-23,40.577744,41.954308,40.577744,40.659927,31.871374,27489900
+1998-04-24,40.659927,40.659927,39.386093,40.146286,31.468758,15720700
+1998-04-27,39.920284,39.920284,39.201183,39.755917,31.162764,7400500
+1998-04-28,40.269558,40.762657,40.269558,40.598289,31.823061,7635100
+1998-04-29,40.598289,40.927021,40.433926,40.721565,31.919689,4432400
+1998-04-30,40.783203,41.296844,40.783203,40.947567,32.096836,5121200
+1998-05-01,40.947567,41.317390,40.885929,41.317390,32.386723,3760200
+1998-05-04,41.522846,42.200855,41.522846,41.789940,32.757137,4462600
+1998-05-05,41.789940,42.015942,41.009205,41.646122,32.644394,3946000
+1998-05-06,41.605030,41.605030,40.762657,40.824295,32.000214,3086700
+1998-05-07,39.632645,39.632645,39.098454,39.180637,30.711826,11144000
+1998-05-08,39.180637,39.427185,39.077908,39.283367,30.792353,6186800
+1998-05-11,39.324459,39.858646,39.324459,39.735371,31.146658,3920500
+1998-05-12,39.653187,39.653187,39.036819,39.057365,30.615198,3690800
+1998-05-13,39.057365,39.324459,38.790268,38.954636,30.534672,4250200
+1998-05-14,38.584812,38.584812,37.968441,38.153355,29.906591,7264200
+1998-05-15,37.660255,37.660255,36.345333,36.365879,28.505474,15286000
+1998-05-18,36.365879,36.982250,35.852234,36.057693,28.263895,9196500
+1998-05-19,36.201511,36.756245,36.201511,36.468605,28.585987,7094200
+1998-05-20,36.612427,37.865715,36.612427,37.680801,29.536180,6820400
+1998-05-21,37.680801,38.379356,37.680801,38.112263,29.874376,5714700
+1998-05-22,38.112263,38.379356,37.968441,38.235535,29.971008,3951200
+1998-05-26,38.235535,38.379356,36.509697,36.735699,28.795349,5932200
+1998-05-27,36.735699,37.762985,36.386425,37.578075,29.455658,7227400
+1998-05-28,37.578075,37.598618,36.817883,37.269890,29.214083,3633300
+1998-05-29,37.269890,37.557529,37.187706,37.228798,29.181873,3170000
+1998-06-01,37.228798,37.434254,36.283695,36.427513,28.553791,4099700
+1998-06-02,36.427513,36.817883,36.016602,36.201511,28.376635,6361400
+1998-06-03,36.201511,36.612427,35.852234,36.016602,28.231688,3957300
+1998-06-04,36.016602,36.119328,35.523506,36.016602,28.231688,3891000
+1998-06-05,36.817883,37.783531,36.817883,37.701347,29.552275,8902100
+1998-06-08,37.701347,38.112263,37.352070,38.091717,29.858265,6196800
+1998-06-09,38.091717,38.297173,37.721893,38.297173,30.019331,4121300
+1998-06-10,38.461540,39.345005,38.461540,38.892998,30.486359,7016600
+1998-06-11,38.892998,39.036819,38.153355,38.194443,29.938801,4684300
+1998-06-12,38.194443,38.297173,37.228798,37.824623,29.648914,5859800
+1998-06-15,37.804077,37.804077,36.879520,36.879520,28.908091,4743300
+1998-06-16,36.900066,37.516438,36.900066,37.105522,29.085249,6130200
+1998-06-17,37.146614,38.214989,37.146614,37.578075,29.455658,6538700
+1998-06-18,37.578075,37.619164,36.797337,36.817883,28.859776,7302900
+1998-06-19,36.119328,36.119328,35.133137,35.359138,27.716333,21131200
+1998-06-22,35.359138,35.811146,34.352398,35.749508,28.022327,15361100
+1998-06-23,35.749508,36.283695,35.708416,36.078239,28.279999,6274400
+1998-06-24,36.078239,37.249344,36.057693,37.043884,29.036926,6249100
+1998-06-25,37.043884,37.454800,36.674065,36.982250,28.988621,6252200
+1998-06-26,36.941158,36.941158,36.283695,36.324787,28.473261,4606800
+1998-06-29,36.776791,37.393162,36.776791,37.208252,29.165771,5392500
+1998-06-30,36.078239,36.078239,34.537312,34.537312,27.072149,22362300
+1998-07-01,34.866043,35.338593,34.866043,34.927681,27.378138,14410800
+1998-07-02,34.927681,34.989315,34.598949,34.989315,27.426443,8207300
+1998-07-06,34.948223,34.948223,34.455128,34.783859,27.265400,7872600
+1998-07-07,34.845497,35.174229,34.845497,35.030407,27.458658,8995400
+1998-07-08,35.030407,35.811146,35.030407,35.605686,27.909595,9645800
+1998-07-09,35.667324,36.632973,35.667324,36.489151,28.602104,13011200
+1998-07-10,37.968441,37.968441,35.996056,37.598618,29.471764,15854300
+1998-07-13,37.845169,39.016273,37.845169,38.831360,30.438049,15306300
+1998-07-14,39.077908,39.694279,39.077908,39.324459,30.824556,12756600
+1998-07-15,39.324459,39.755917,38.646450,38.831360,30.438049,8713200
+1998-07-16,38.831360,38.892998,37.536983,38.769722,30.389729,10147600
+1998-07-17,38.769722,39.016273,38.276627,38.831360,30.438049,5878400
+1998-07-20,38.831360,39.201183,37.968441,38.030079,29.809967,6405000
+1998-07-21,38.030079,38.030079,36.920612,37.228798,29.181873,6457300
+1998-07-22,37.228798,37.475346,36.304241,36.674065,28.786701,8617400
+1998-07-23,36.674065,37.352070,35.502960,35.749508,28.060980,4679000
+1998-07-24,35.749508,36.489151,35.564594,36.304241,28.496407,4283500
+1998-07-27,36.180965,36.180965,35.502960,36.119328,28.351269,3906700
+1998-07-28,36.057693,36.057693,34.578403,34.886589,27.383642,6300700
+1998-07-29,34.886589,35.256409,34.578403,34.886589,27.383642,4168000
+1998-07-30,34.886589,35.009861,33.530571,34.824951,27.335260,5601700
+1998-07-31,34.824951,35.009861,33.777119,33.962032,26.657930,3860500
+1998-08-03,34.023670,34.516766,34.023670,34.208580,26.851454,4230800
+1998-08-04,34.208580,34.701675,32.297829,32.359467,25.400028,7750000
+1998-08-05,32.359467,33.592209,31.804733,33.099113,25.980600,7835400
+1998-08-06,33.099113,33.530571,32.606014,32.852566,25.787073,5529400
+1998-08-07,32.852566,33.099113,32.421104,32.914200,25.835449,4928800
+1998-08-10,32.914200,33.160751,32.112919,32.421104,25.448406,4527500
+1998-08-11,32.051281,32.051281,31.003452,31.681459,24.867836,8611100
+1998-08-12,31.681459,32.482742,31.558186,31.989645,25.109737,7060200
+1998-08-13,31.989645,32.975838,31.126726,31.373274,24.625931,4909300
+1998-08-14,31.373274,31.989645,30.818541,31.311638,24.577547,4505800
+1998-08-17,31.311638,31.804733,30.695267,31.743097,24.916218,4437200
+1998-08-18,31.743097,33.160751,31.681459,33.037476,25.932220,5901700
+1998-08-19,33.037476,33.345661,32.174557,32.667652,25.641933,4601800
+1998-08-20,32.667652,33.407299,32.482742,33.160751,26.028976,3848900
+1998-08-21,32.975838,32.975838,31.866371,32.790928,25.738691,5337000
+1998-08-24,32.790928,33.468933,32.359467,32.914200,25.835449,3388800
+1998-08-25,33.160751,34.208580,33.160751,33.592209,26.367643,4317300
+1998-08-26,33.160751,33.160751,32.051281,32.544380,25.545170,5527000
+1998-08-27,32.051281,32.051281,30.510355,31.311638,24.577547,7438800
+1998-08-28,31.250000,31.250000,29.647436,30.202169,23.706688,7073300
+1998-08-31,30.202169,30.818541,25.641026,27.058678,21.239260,11516500
+1998-09-01,27.366863,30.078896,27.366863,29.955622,23.513163,15415000
+1998-09-02,29.955622,31.434912,29.154339,29.462524,23.126114,8292600
+1998-09-03,29.339251,29.339251,27.613413,27.921598,21.916592,7348500
+1998-09-04,27.921598,28.353058,27.120316,27.859961,21.868210,7735100
+1998-09-08,27.859961,29.770710,27.736687,28.722879,22.545546,8947000
+1998-09-09,28.661243,28.661243,27.181953,27.428501,21.529545,8494500
+1998-09-10,27.366863,27.366863,26.257397,26.688856,20.948975,8365800
+1998-09-11,26.688856,27.551775,25.394478,25.456114,19.981354,7289200
+1998-09-14,24.901381,24.901381,23.545364,24.100100,18.916969,24024900
+1998-09-15,24.469921,25.147928,24.469921,25.147928,19.739449,11066600
+1998-09-16,25.147928,25.641026,24.839743,25.394478,19.932972,9827500
+1998-09-17,25.394478,25.394478,24.654833,25.332840,19.884590,6299900
+1998-09-18,25.332840,25.517752,24.778107,25.209566,19.787825,8022900
+1998-09-21,25.024654,25.024654,24.531559,24.778107,19.449160,5004400
+1998-09-22,24.778107,25.147928,24.593195,24.839743,19.497549,6216500
+1998-09-23,25.394478,26.627218,25.394478,26.442308,20.755453,8634500
+1998-09-24,26.442308,26.750494,25.394478,25.517752,20.029732,6325100
+1998-09-25,25.517752,25.641026,25.147928,25.271204,19.836208,4416200
+1998-09-28,25.456114,25.949211,25.456114,25.887573,20.320021,4347000
+1998-09-29,25.887573,26.195759,25.394478,25.641026,20.126497,4486400
+1998-09-30,25.641026,25.825937,24.716469,25.024654,19.642687,5170200
+1998-10-01,24.901381,24.901381,24.161736,24.593195,19.304018,7020400
+1998-10-02,24.593195,25.394478,24.161736,24.901381,19.545919,5299100
+1998-10-05,24.901381,25.147928,24.408283,24.716469,19.400780,4276200
+1998-10-06,24.716469,25.641026,24.593195,25.024654,19.642687,4725200
+1998-10-07,25.024654,25.579388,24.223373,24.593195,19.343658,4705300
+1998-10-08,24.469921,24.469921,22.312624,23.175543,18.228607,9845700
+1998-10-09,23.175543,23.483728,22.189348,23.360455,18.374056,6219000
+1998-10-12,24.100100,24.716469,24.100100,24.285009,19.101255,4582100
+1998-10-13,24.285009,25.271204,23.853550,25.147928,19.779978,4925300
+1998-10-14,25.147928,25.271204,23.915188,24.038462,18.907339,5313000
+1998-10-15,24.038462,25.641026,23.853550,25.517752,20.070860,5282600
+1998-10-16,25.517752,25.641026,24.778107,25.147928,19.779978,6485500
+1998-10-19,25.147928,25.702663,24.778107,25.702663,20.216303,5149700
+1998-10-20,25.825937,27.120316,25.825937,26.688856,20.991987,7956000
+1998-10-21,26.688856,27.366863,26.565582,27.181953,21.379835,5162100
+1998-10-22,27.181953,27.613413,26.688856,27.613413,21.719194,3663000
+1998-10-23,27.428501,27.428501,26.688856,27.366863,21.525272,3919500
+1998-10-26,27.366863,27.613413,27.120316,27.551775,21.670713,3418000
+1998-10-27,27.366863,27.366863,26.380671,26.442308,20.798069,6057400
+1998-10-28,26.442308,26.688856,25.394478,26.072485,20.507183,5740100
+1998-10-29,26.072485,26.503944,25.764299,26.257397,20.652630,3825900
+1998-10-30,26.257397,26.935404,26.195759,26.565582,20.895031,4920600
+1998-11-02,27.058678,28.722879,27.058678,28.414694,22.349440,7346900
+1998-11-03,28.476332,29.339251,28.476332,29.154339,22.931204,8542200
+1998-11-04,29.154339,29.832348,28.168146,28.229782,22.203995,8859600
+1998-11-05,28.229782,29.770710,27.551775,29.770710,23.416010,7151500
+1998-11-06,29.770710,30.202169,29.400888,29.832348,23.464487,6081900
+1998-11-09,29.832348,30.202169,28.907791,29.154339,22.931204,4963100
+1998-11-10,28.969427,28.969427,28.291420,28.476332,22.397919,6490800
+1998-11-11,28.476332,28.907791,28.414694,28.476332,22.397919,4391100
+1998-11-12,28.414694,28.414694,27.736687,28.168146,22.155519,4227700
+1998-11-13,28.168146,28.969427,27.859961,28.722879,22.591841,4344600
+1998-11-16,28.722879,29.400888,28.476332,28.661243,22.543358,5098000
+1998-11-17,28.661243,28.846153,28.168146,28.661243,22.543358,4969600
+1998-11-18,28.661243,28.722879,27.859961,28.599606,22.494881,5500100
+1998-11-19,28.476332,28.476332,27.736687,27.921598,21.961599,7068500
+1998-11-20,27.921598,28.291420,27.736687,28.106508,22.107038,6149600
+1998-11-23,28.229782,29.092703,28.229782,29.092703,22.882723,6936100
+1998-11-24,29.092703,30.448717,29.092703,29.400888,23.125122,10323000
+1998-11-25,29.400888,30.510355,29.215977,30.510355,23.997768,6416500
+1998-11-27,30.510355,31.681459,30.387081,31.434912,24.724974,4407600
+1998-11-30,31.434912,32.667652,31.126726,31.743097,24.967379,9385400
+1998-12-01,31.558186,31.558186,30.695267,31.065088,24.434093,6403100
+1998-12-02,31.003452,31.003452,30.263807,30.756903,24.191689,4637100
+1998-12-03,30.756903,31.496548,30.387081,30.448717,23.949287,4585900
+1998-12-04,30.818541,31.311638,30.818541,30.941814,24.337135,3951900
+1998-12-07,30.941814,31.496548,30.448717,30.695267,24.143213,4523700
+1998-12-08,30.880178,31.928007,30.880178,31.558186,24.821936,6075300
+1998-12-09,31.619822,33.284023,31.619822,33.099113,26.033943,7122200
+1998-12-10,33.099113,33.222385,31.804733,31.989645,25.161297,5615600
+1998-12-11,31.619822,31.619822,31.003452,31.558186,24.821936,4689900
+1998-12-14,31.065088,31.065088,29.709072,30.202169,23.755365,5454800
+1998-12-15,30.263807,30.941814,30.263807,30.325443,23.852329,5428200
+1998-12-16,30.325443,31.003452,30.140533,30.325443,23.852329,4315300
+1998-12-17,30.325443,30.756903,30.325443,30.448717,23.949287,3646700
+1998-12-18,30.448717,30.510355,29.647436,30.387081,23.900812,6266700
+1998-12-21,30.387081,30.941814,29.893984,30.571993,24.046257,5879900
+1998-12-22,30.571993,32.236195,29.524162,29.524162,23.222088,6814000
+1998-12-23,29.770710,30.571993,29.770710,30.510355,23.997768,5935000
+1998-12-24,30.510355,30.571993,30.078896,30.140533,23.706888,1827300
+1998-12-28,30.140533,30.571993,29.832348,30.017258,23.609928,4667900
+1998-12-29,30.017258,30.263807,29.832348,30.017258,23.609928,4545300
+1998-12-30,30.017258,30.078896,29.339251,29.462524,23.173605,5160800
+1998-12-31,29.462524,29.709072,29.154339,29.585798,23.270563,5654200
+1999-01-04,29.585798,30.263807,28.846153,29.154339,22.931204,7204500
+1999-01-05,29.277613,30.202169,29.277613,30.140533,23.706888,6306000
+1999-01-06,30.140533,30.571993,29.339251,30.510355,23.997768,10616400
+1999-01-07,30.510355,31.311638,29.585798,31.311638,24.628010,11119600
+1999-01-08,31.373274,32.359467,31.373274,32.112919,25.258257,10827800
+1999-01-11,32.606014,35.009861,32.606014,34.763313,27.342911,17537500
+1999-01-12,36.674065,38.153355,36.674065,37.475346,29.476057,21567200
+1999-01-13,36.858974,36.858974,33.900394,35.626232,28.021633,15807800
+1999-01-14,35.626232,36.735699,35.318047,35.564594,27.973158,7617700
+1999-01-15,35.564594,36.180965,35.133137,35.502960,27.924677,6613500
+1999-01-19,35.502960,36.119328,35.318047,35.996056,28.312527,7126100
+1999-01-20,35.996056,36.427513,35.379684,35.502960,27.924677,7529300
+1999-01-21,35.502960,35.502960,33.653847,34.085304,26.809626,7559000
+1999-01-22,34.085304,34.085304,33.530571,33.653847,26.470270,5123400
+1999-01-25,33.653847,34.516766,33.592209,34.455128,27.100513,4173500
+1999-01-26,34.455128,34.886589,33.715485,34.085304,26.809626,5753200
+1999-01-27,33.900394,33.900394,32.236195,32.482742,25.549147,11787600
+1999-01-28,32.852566,33.900394,32.852566,33.037476,25.985464,12284300
+1999-01-29,33.037476,33.530571,32.544380,32.544380,25.597622,8987600
+1999-02-01,32.790928,33.468933,32.790928,33.222385,26.130911,6278700
+1999-02-02,33.222385,33.222385,32.359467,32.914200,25.888506,6068100
+1999-02-03,32.914200,33.407299,32.790928,33.160751,26.082430,6682900
+1999-02-04,33.160751,33.592209,32.544380,32.667652,25.694580,5900700
+1999-02-05,33.099113,34.208580,33.099113,33.777119,26.567226,7463000
+1999-02-08,33.777119,33.777119,33.345661,33.530571,26.373308,4884100
+1999-02-09,33.530571,34.146942,32.975838,33.037476,25.985464,5851500
+1999-02-10,33.037476,33.715485,32.852566,33.592209,26.421789,6147100
+1999-02-11,33.592209,35.009861,33.345661,34.701675,27.294434,6631500
+1999-02-12,34.701675,35.071499,34.085304,34.948223,27.488358,6741300
+1999-02-16,34.948223,35.502960,34.146942,34.578403,27.197472,5065300
+1999-02-17,34.578403,34.763313,34.085304,34.393490,27.052031,4532200
+1999-02-18,34.393490,34.393490,33.777119,34.023670,26.761150,4894700
+1999-02-19,34.023670,34.023670,33.530571,33.653847,26.470270,3219000
+1999-02-22,33.653847,34.640041,33.653847,34.393490,27.052031,5520100
+1999-02-23,34.393490,35.009861,34.393490,34.393490,27.052031,5031200
+1999-02-24,34.455128,35.502960,34.455128,34.578403,27.197472,6245100
+1999-02-25,34.578403,34.763313,33.592209,34.023670,26.761150,5412300
+1999-02-26,34.023670,35.194775,33.592209,34.701675,27.294434,5844200
+1999-03-01,34.701675,34.763313,34.085304,34.331856,27.003557,4674600
+1999-03-02,34.331856,34.701675,33.592209,33.715485,26.518751,4393600
+1999-03-03,33.900394,34.640041,33.900394,34.023670,26.761150,5006700
+1999-03-04,34.085304,34.824951,34.085304,34.701675,27.294434,5184500
+1999-03-05,35.071499,35.502960,35.071499,35.318047,27.779232,5304700
+1999-03-08,35.441322,36.180965,35.441322,35.934418,28.264036,6136200
+1999-03-09,35.318047,35.318047,33.838757,34.208580,26.906593,8412500
+1999-03-10,34.208580,34.578403,33.900394,34.023670,26.761150,5654300
+1999-03-11,34.023670,34.455128,33.653847,34.208580,26.906593,5430400
+1999-03-12,35.133137,35.934418,35.133137,35.564594,27.973158,8857200
+1999-03-15,35.564594,36.057693,34.640041,35.379684,27.827723,5640400
+1999-03-16,35.379684,35.934418,34.578403,34.578403,27.197472,4257700
+1999-03-17,34.578403,35.071499,34.023670,34.023670,26.761150,4276700
+1999-03-18,34.208580,35.194775,34.208580,34.948223,27.488358,5384600
+1999-03-19,34.948223,35.318047,34.393490,34.578403,27.197472,5642900
+1999-03-22,34.516766,34.516766,33.777119,33.900394,26.664186,4400100
+1999-03-23,33.715485,33.715485,32.359467,32.729290,25.743061,9928700
+1999-03-24,32.729290,32.790928,32.174557,32.729290,25.743061,6109700
+1999-03-25,32.729290,33.160751,32.421104,32.914200,25.888506,8047100
+1999-03-26,32.914200,32.914200,32.297829,32.482742,25.549147,5630500
+1999-03-29,32.482742,32.729290,32.297829,32.606014,25.646103,6408900
+1999-03-30,32.606014,32.606014,31.866371,32.359467,25.452179,6015400
+1999-03-31,32.359467,32.606014,30.633629,30.695267,24.143213,10717100
+1999-04-01,30.695267,31.373274,30.571993,31.065088,24.434093,5880700
+1999-04-05,31.065088,31.804733,30.941814,31.065088,24.434093,5979700
+1999-04-06,31.065088,31.311638,30.140533,30.202169,23.755365,7777800
+1999-04-07,30.202169,32.975838,30.078896,32.421104,25.500660,12212400
+1999-04-08,32.544380,33.592209,32.544380,33.592209,26.421789,13025800
+1999-04-09,33.592209,34.331856,33.345661,34.023670,26.761150,7634500
+1999-04-12,34.270218,35.502960,34.270218,34.948223,27.488358,9852100
+1999-04-13,34.948223,35.441322,33.900394,34.270218,26.955070,6488300
+1999-04-14,34.270218,34.948223,33.284023,34.516766,27.148994,7044000
+1999-04-15,34.516766,34.763313,33.160751,33.592209,26.421789,4857700
+1999-04-16,33.592209,33.777119,32.790928,32.914200,25.888506,4387400
+1999-04-19,32.914200,33.407299,31.619822,31.989645,25.161297,6370900
+1999-04-20,31.989645,32.667652,31.804733,32.606014,25.646103,4417100
+1999-04-21,32.606014,33.037476,31.681459,32.975838,25.936983,5262300
+1999-04-22,32.975838,33.284023,32.482742,32.975838,25.936983,4404400
+1999-04-23,32.975838,34.516766,32.112919,34.023670,26.761150,5988400
+1999-04-26,34.085304,34.763313,34.085304,34.516766,27.148994,5940200
+1999-04-27,33.838757,33.838757,31.989645,32.051281,25.209776,11121200
+1999-04-28,32.051281,32.051281,30.756903,32.051281,25.209776,11752200
+1999-04-29,31.619822,31.619822,29.770710,29.955622,23.561445,10041400
+1999-04-30,29.955622,31.311638,29.709072,31.311638,24.628010,7897700
+1999-05-03,31.188362,31.188362,30.263807,30.818541,24.240175,6438100
+1999-05-04,30.695267,30.695267,29.709072,30.325443,23.852329,6050700
+1999-05-05,29.955622,29.955622,29.154339,29.647436,23.319044,9726100
+1999-05-06,29.647436,30.202169,29.154339,29.462524,23.173605,7869900
+1999-05-07,29.462524,30.078896,29.462524,29.585798,23.270563,5130400
+1999-05-10,29.585798,30.140533,29.277613,29.339251,23.076643,7528300
+1999-05-11,29.339251,29.462524,28.722879,29.031065,22.834244,10839200
+1999-05-12,29.031065,29.215977,28.106508,28.784517,22.640322,9731600
+1999-05-13,28.846153,29.462524,28.846153,28.969427,22.785757,8130900
+1999-05-14,28.969427,29.154339,28.537968,28.969427,22.785757,7845700
+1999-05-17,28.969427,28.969427,28.476332,28.599606,22.494881,6413400
+1999-05-18,28.599606,29.277613,28.414694,29.154339,22.931204,8900400
+1999-05-19,29.154339,29.770710,28.969427,29.524162,23.222088,9024200
+1999-05-20,29.524162,29.647436,29.215977,29.400888,23.125122,5402500
+1999-05-21,29.400888,29.524162,28.907791,29.092703,22.882723,4758400
+1999-05-24,29.154339,29.647436,29.154339,29.462524,23.173605,5755400
+1999-05-25,29.462524,30.017258,29.339251,29.462524,23.173605,7105800
+1999-05-26,29.462524,30.017258,29.339251,29.585798,23.270563,6230400
+1999-05-27,29.462524,29.462524,29.092703,29.215977,22.979685,4588500
+1999-05-28,29.215977,29.215977,28.537968,28.722879,22.591841,5255200
+1999-06-01,28.722879,28.846153,28.229782,28.414694,22.349440,4341400
+1999-06-02,28.414694,28.846153,28.291420,28.722879,22.591841,4258100
+1999-06-03,28.722879,29.277613,28.353058,28.599606,22.494881,5971400
+1999-06-04,28.599606,30.448717,28.599606,30.202169,23.755365,11037800
+1999-06-07,30.202169,30.818541,29.893984,30.448717,23.949287,5604500
+1999-06-08,30.325443,30.325443,28.784517,29.215977,22.979685,8166100
+1999-06-09,29.215977,29.462524,28.784517,29.031065,22.834244,4196500
+1999-06-10,29.031065,29.215977,28.722879,28.969427,22.785757,4226500
+1999-06-11,28.969427,29.277613,28.168146,28.476332,22.397919,5664600
+1999-06-14,28.476332,28.784517,28.229782,28.599606,22.494881,3639500
+1999-06-15,28.599606,29.277613,28.599606,28.846153,22.688801,4023000
+1999-06-16,28.846153,29.277613,28.846153,28.969427,22.785757,3937500
+1999-06-17,28.969427,29.955622,28.846153,29.585798,23.270563,4831300
+1999-06-18,29.585798,30.202169,29.585798,30.078896,23.658405,7238500
+1999-06-21,30.140533,30.510355,30.140533,30.325443,23.852329,5693100
+1999-06-22,30.263807,30.263807,29.524162,29.709072,23.367527,4098900
+1999-06-23,29.709072,29.709072,28.969427,29.154339,22.931204,4627400
+1999-06-24,29.154339,29.647436,29.031065,29.154339,22.931204,4063800
+1999-06-25,29.154339,29.524162,29.154339,29.277613,23.028162,4085200
+1999-06-28,29.277613,29.400888,28.846153,28.907791,22.737282,3449900
+1999-06-29,28.907791,29.709072,28.784517,29.709072,23.367527,5078800
+1999-06-30,29.709072,30.448717,28.722879,30.387081,23.900812,9641900
+1999-07-01,29.524162,29.524162,28.537968,28.599606,22.494881,13135300
+1999-07-02,28.476332,28.476332,27.736687,27.921598,21.961599,11736800
+1999-07-06,27.921598,27.921598,27.428501,27.551775,21.670713,10933600
+1999-07-07,27.551775,28.353058,27.551775,27.675049,21.767673,11955900
+1999-07-08,27.675049,27.675049,27.305227,27.428501,21.573751,10492200
+1999-07-09,27.428501,27.490139,26.935404,27.243589,21.428310,11075300
+1999-07-12,27.305227,27.983234,27.305227,27.428501,21.573751,15567300
+1999-07-13,27.428501,27.675049,26.935404,27.181953,21.379835,10639100
+1999-07-14,27.181953,27.675049,27.058678,27.120316,21.331350,10214900
+1999-07-15,27.243589,27.921598,27.243589,27.736687,21.816147,8832100
+1999-07-16,27.736687,27.921598,27.243589,27.613413,21.719194,8369600
+1999-07-19,27.613413,27.798323,27.181953,27.428501,21.573751,7573500
+1999-07-20,27.428501,27.428501,26.997042,26.997042,21.234396,7148200
+1999-07-21,26.997042,27.428501,26.627218,26.812130,21.088947,9749600
+1999-07-22,26.812130,27.120316,26.442308,26.627218,20.943508,8842000
+1999-07-23,26.627218,26.750494,26.319033,26.442308,20.798069,6093400
+1999-07-26,26.565582,27.181953,26.565582,27.120316,21.331350,11983000
+1999-07-27,27.305227,27.921598,27.305227,27.859961,21.913115,11464000
+1999-07-28,27.921598,28.353058,27.921598,27.983234,22.010077,7864000
+1999-07-29,27.983234,27.983234,27.243589,27.366863,21.525272,4806400
+1999-07-30,27.366863,27.675049,26.997042,27.181953,21.379835,4536700
+1999-08-02,27.181953,27.490139,26.812130,26.812130,21.088947,3527900
+1999-08-03,26.812130,27.058678,26.072485,26.442308,20.798069,7460700
+1999-08-04,26.442308,26.565582,25.641026,25.641026,20.167822,6699900
+1999-08-05,25.641026,26.134123,25.456114,25.517752,20.070860,6488500
+1999-08-06,25.517752,25.825937,25.147928,25.147928,19.779978,4980500
+1999-08-09,25.147928,25.764299,24.839743,25.394478,19.973902,4908500
+1999-08-10,25.394478,25.764299,24.901381,25.271204,19.876944,4685000
+1999-08-11,25.332840,25.764299,25.332840,25.517752,20.070860,4510300
+1999-08-12,25.517752,26.010849,25.517752,25.764299,20.264784,5060500
+1999-08-13,26.257397,26.873768,26.257397,26.812130,21.088947,5959200
+1999-08-16,27.181953,27.613413,27.181953,27.551775,21.670713,6132200
+1999-08-17,27.675049,28.106508,27.675049,28.106508,22.107038,10975500
+1999-08-18,28.106508,28.907791,28.106508,28.784517,22.640322,11620000
+1999-08-19,28.784517,28.969427,28.414694,28.969427,22.785757,10027200
+1999-08-20,29.092703,29.585798,29.092703,29.462524,23.173605,10079700
+1999-08-23,29.462524,29.770710,29.215977,29.585798,23.270563,6508800
+1999-08-24,29.585798,29.709072,29.277613,29.400888,23.125122,6312300
+1999-08-25,29.400888,29.585798,28.784517,29.462524,23.173605,5392500
+1999-08-26,29.462524,29.524162,28.907791,29.092703,22.882723,4938800
+1999-08-27,29.092703,29.215977,28.537968,28.722879,22.591841,4029800
+1999-08-30,28.722879,28.907791,27.983234,28.106508,22.107038,3992100
+1999-08-31,28.106508,28.106508,27.120316,27.366863,21.525272,5143300
+1999-09-01,27.366863,27.675049,26.812130,27.675049,21.767673,4682600
+1999-09-02,27.428501,27.428501,26.935404,27.366863,21.525272,3194800
+1999-09-03,27.366863,27.859961,27.305227,27.551775,21.670713,3628500
+1999-09-07,27.551775,28.476332,27.366863,28.291420,22.252476,5804800
+1999-09-08,28.291420,29.215977,28.106508,28.661243,22.543358,6248700
+1999-09-09,28.661243,29.031065,28.106508,28.353058,22.300962,4405000
+1999-09-10,28.229782,28.229782,27.428501,27.490139,21.622236,4538000
+1999-09-13,27.490139,27.798323,27.243589,27.675049,21.767673,4151500
+1999-09-14,27.551775,27.551775,27.243589,27.551775,21.670713,4421600
+1999-09-15,27.551775,27.798323,27.120316,27.120316,21.331350,3826000
+1999-09-16,27.120316,27.490139,26.997042,27.120316,21.331350,3154900
+1999-09-17,27.305227,27.983234,27.305227,27.859961,21.913115,4872000
+1999-09-20,27.859961,28.106508,27.736687,27.798323,21.864634,2908400
+1999-09-21,27.798323,27.921598,27.181953,27.305227,21.476791,4853100
+1999-09-22,27.305227,27.490139,26.997042,26.997042,21.234396,4024100
+1999-09-23,26.997042,27.181953,26.257397,26.257397,20.652630,5828500
+1999-09-24,26.257397,26.319033,25.641026,25.949211,20.410225,4151100
+1999-09-27,25.949211,26.072485,25.641026,25.764299,20.264784,5173200
+1999-09-28,25.764299,26.873768,25.702663,26.072485,20.507183,7634700
+1999-09-29,26.072485,26.257397,25.579388,25.825937,20.313261,5412300
+1999-09-30,25.825937,25.825937,24.778107,25.641026,20.167822,9116300
+1999-10-01,25.271204,25.271204,24.778107,25.024654,19.683016,6439900
+1999-10-04,25.024654,25.332840,24.901381,25.271204,19.876944,3827100
+1999-10-05,25.271204,25.641026,24.531559,24.593195,19.343658,6881400
+1999-10-06,24.716469,25.517752,24.716469,25.271204,19.876944,5544800
+1999-10-07,25.271204,25.764299,25.147928,25.271204,19.918139,6317300
+1999-10-08,25.271204,25.271204,24.839743,24.963018,19.675236,4947200
+1999-10-11,24.963018,25.332840,24.778107,24.901381,19.626657,5028600
+1999-10-12,24.901381,24.963018,24.654833,24.654833,19.432331,4594500
+1999-10-13,24.654833,24.716469,24.100100,24.285009,19.140850,5528400
+1999-10-14,24.285009,24.716469,23.791914,24.593195,19.383751,6045300
+1999-10-15,24.593195,24.654833,23.730276,23.791914,18.752201,5445300
+1999-10-18,23.791914,23.915188,23.052269,23.298817,18.363548,6140300
+1999-10-19,23.545364,24.408283,23.545364,23.668638,18.655045,6527900
+1999-10-20,24.038462,24.716469,24.038462,24.531559,19.335169,6253200
+1999-10-21,24.469921,24.469921,24.038462,24.285009,19.140850,5363600
+1999-10-22,24.285009,25.702663,24.038462,25.702663,20.258205,7138400
+1999-10-25,25.394478,25.394478,24.963018,25.271204,19.918139,4377100
+1999-10-26,25.271204,26.072485,24.901381,24.901381,19.626657,6696800
+1999-10-27,24.901381,25.209566,24.654833,25.147928,19.820976,5155200
+1999-10-28,25.147928,25.949211,25.024654,25.887573,20.403944,7710400
+1999-10-29,25.887573,27.490139,25.887573,26.134123,20.598272,7239400
+1999-11-01,25.702663,25.702663,24.593195,25.332840,19.966717,7585800
+1999-11-02,25.332840,26.072485,25.147928,26.072485,20.549688,6240800
+1999-11-03,26.319033,26.997042,26.319033,26.503944,20.889753,8196800
+1999-11-04,26.503944,28.599606,25.641026,26.134123,20.598272,13492400
+1999-11-05,25.147928,25.147928,23.730276,23.976824,18.897940,26165000
+1999-11-08,23.915188,23.915188,23.052269,23.175543,18.266392,14218600
+1999-11-09,23.175543,23.791914,23.175543,23.298817,18.363548,14069200
+1999-11-10,23.298817,23.853550,23.175543,23.545364,18.557877,11321600
+1999-11-11,23.545364,24.038462,23.113905,23.976824,18.897940,9720900
+1999-11-12,23.976824,25.702663,23.730276,24.716469,19.650644,10986100
+1999-11-15,25.147928,26.257397,25.147928,25.517752,20.287699,15236800
+1999-11-16,25.517752,25.887573,25.456114,25.764299,20.483713,11693400
+1999-11-17,25.764299,26.010849,25.147928,25.641026,20.385704,10157000
+1999-11-18,25.641026,26.812130,25.641026,26.750494,21.267776,11389200
+1999-11-19,26.750494,26.812130,26.257397,26.380671,20.973749,6558400
+1999-11-22,26.380671,26.873768,26.134123,26.873768,21.365788,6706000
+1999-11-23,26.873768,26.935404,26.319033,26.627218,21.169769,7789100
+1999-11-24,26.812130,28.291420,26.812130,27.859961,22.149855,10519600
+1999-11-26,27.859961,28.291420,26.873768,27.181953,21.610809,4464200
+1999-11-29,27.490139,28.537968,27.490139,28.044872,22.296864,9894400
+1999-11-30,28.044872,28.537968,27.305227,27.490139,21.855827,8684600
+1999-12-01,27.490139,27.798323,27.181953,27.613413,21.953833,7080900
+1999-12-02,27.613413,27.736687,27.120316,27.474655,21.843521,8515700
+1999-12-03,27.474655,28.537968,27.366863,27.859961,22.149855,10163400
+1999-12-06,27.859961,27.859961,27.305227,27.428501,21.806826,5916300
+1999-12-07,27.428501,27.675049,27.058678,27.120316,21.561800,8214000
+1999-12-08,27.120316,27.613413,26.750494,26.873768,21.365788,9954400
+1999-12-09,26.935404,27.551775,26.935404,27.243589,21.659813,11966800
+1999-12-10,27.366863,28.044872,27.366863,27.659567,21.990530,11107300
+1999-12-13,27.490139,27.490139,26.997042,27.243589,21.659813,8867200
+1999-12-14,27.243589,28.353058,26.935404,28.106508,22.345869,8453100
+1999-12-15,28.106508,28.969427,27.490139,28.476332,22.639893,10543700
+1999-12-16,27.921598,27.921598,27.428501,27.613413,21.953833,6311300
+1999-12-17,27.613413,27.983234,27.366863,27.859961,22.149855,11931400
+1999-12-20,27.859961,28.353058,27.490139,27.752071,22.064079,6948200
+1999-12-21,27.983234,28.784517,27.983234,28.044872,22.296864,7119600
+1999-12-22,28.044872,28.661243,27.736687,28.599606,22.737900,5294900
+1999-12-23,28.599606,28.969427,28.476332,28.722879,22.835911,4760200
+1999-12-27,28.722879,29.585798,28.722879,28.907791,22.982920,8227900
+1999-12-28,28.907791,29.339251,28.414694,28.599606,22.737900,4180600
+1999-12-29,28.599606,28.846153,28.168146,28.229782,22.443880,4098400
+1999-12-30,28.414694,28.969427,28.414694,28.722879,22.835911,4360900
+1999-12-31,28.722879,28.969427,28.414694,28.846153,22.933916,2946900
+2000-01-03,28.846153,29.524162,28.353058,29.462524,23.423954,8404800
+2000-01-04,29.585798,31.434912,29.585798,31.188362,24.796074,16056100
+2000-01-05,31.188362,32.667652,31.188362,32.482742,25.825165,19829900
+2000-01-06,32.482742,32.667652,31.188362,31.188362,24.796074,7905600
+2000-01-07,31.188362,31.681459,30.387081,30.695267,24.404043,6775600
+2000-01-10,33.530571,35.441322,33.530571,35.379684,28.128357,33976400
+2000-01-11,35.379684,36.242603,34.763313,35.749508,28.422379,20141000
+2000-01-12,35.441322,35.441322,32.852566,33.222385,26.413210,13105000
+2000-01-13,33.345661,34.516766,33.345661,34.331856,27.295284,8735100
+2000-01-14,34.331856,34.516766,32.975838,33.099113,26.315199,8154000
+2000-01-18,33.099113,34.331856,32.606014,34.023670,27.050259,8104200
+2000-01-19,34.023670,34.455128,33.592209,34.270218,27.246283,6553300
+2000-01-20,34.270218,34.948223,32.975838,33.160751,26.364206,7214300
+2000-01-21,33.160751,33.160751,32.236195,32.297829,25.678144,8286700
+2000-01-24,32.297829,33.530571,31.928007,32.852566,26.119188,8345600
+2000-01-25,35.996056,37.475346,35.996056,36.982250,29.402462,29920500
+2000-01-26,36.920612,36.920612,35.872780,36.119328,28.716400,10403900
+2000-01-27,36.119328,37.105522,36.119328,36.612427,29.108433,9821500
+2000-01-28,36.612427,36.982250,35.934418,36.242603,28.814411,7503200
+2000-01-31,35.934418,35.934418,35.318047,35.811146,28.471382,7911500
+2000-02-01,35.811146,35.934418,35.441322,35.502960,28.226366,7239400
+2000-02-02,35.502960,37.475346,35.318047,36.735699,29.206438,9414200
+2000-02-03,37.043884,37.968441,37.043884,37.352070,29.696484,10911500
+2000-02-04,37.352070,37.721893,36.489151,37.598618,29.892492,6274900
+2000-02-07,37.598618,37.906803,37.043884,37.475346,29.794489,6155400
+2000-02-08,37.413708,37.413708,36.489151,37.043884,29.451466,5782000
+2000-02-09,37.043884,38.461540,36.612427,37.167160,29.549467,6160200
+2000-02-10,37.167160,37.352070,36.365879,36.427513,28.961412,5677600
+2000-02-11,36.427513,38.091717,36.304241,37.352070,29.696484,7249200
+2000-02-14,36.858974,36.858974,36.119328,36.489151,29.010420,5203200
+2000-02-15,36.489151,36.674065,34.886589,36.365879,28.912424,7084900
+2000-02-16,36.365879,36.735699,36.057693,36.427513,28.961412,4476600
+2000-02-17,36.365879,36.365879,35.194775,35.996056,28.618397,4757700
+2000-02-18,35.872780,35.872780,34.824951,35.009861,27.834328,6495800
+2000-02-22,34.948223,34.948223,32.174557,32.914200,26.168190,12748100
+2000-02-23,32.914200,34.146942,32.174557,33.653847,26.756239,8374300
+2000-02-24,33.592209,33.592209,31.989645,32.729290,26.021173,8341200
+2000-02-25,32.606014,32.606014,30.571993,30.571993,24.306028,8443200
+2000-02-28,31.558186,32.482742,31.558186,32.297829,25.678144,7946100
+2000-02-29,32.297829,33.530571,31.804733,33.530571,26.658230,5508100
+2000-03-01,33.530571,34.270218,32.729290,34.208580,27.197277,7252600
+2000-03-02,34.146942,34.146942,33.345661,34.023670,27.050259,6590200
+2000-03-03,34.393490,35.996056,34.393490,35.996056,28.618397,14564200
+2000-03-06,35.996056,36.242603,35.071499,35.502960,28.226366,7343200
+2000-03-07,35.502960,35.872780,34.948223,35.502960,28.226366,7590400
+2000-03-08,35.441322,35.441322,34.393490,34.455128,27.393288,6296500
+2000-03-09,34.455128,35.749508,34.146942,35.749508,28.422379,5624700
+2000-03-10,35.256409,35.256409,34.208580,34.640041,27.540310,7064100
+2000-03-13,34.640041,35.009861,32.544380,34.455128,27.393288,5249000
+2000-03-14,34.270218,34.270218,33.345661,33.653847,26.756239,5591200
+2000-03-15,33.653847,34.455128,33.160751,34.331856,27.295284,6645000
+2000-03-16,34.331856,35.009861,33.284023,34.824951,27.687315,8337700
+2000-03-17,34.824951,36.427513,34.085304,36.304241,28.863409,8756300
+2000-03-20,37.167160,38.954636,37.167160,38.892998,30.921579,16990800
+2000-03-21,38.892998,39.509369,38.091717,38.646450,30.725565,13366200
+2000-03-22,39.201183,40.865383,39.201183,39.940830,31.754665,13730100
+2000-03-23,39.940830,41.358482,39.262821,40.988659,32.587723,8166300
+2000-03-24,40.988659,41.173569,40.495564,40.865383,32.489716,6632400
+2000-03-27,40.865383,41.605030,39.447731,41.173569,32.734741,5799400
+2000-03-28,41.173569,41.913216,40.064102,40.372288,32.097683,7594900
+2000-03-29,40.372288,40.618835,39.694279,40.249012,31.999668,5164000
+2000-03-30,40.249012,40.433926,39.201183,39.694279,31.558641,5420300
+2000-03-31,39.694279,40.927021,39.694279,40.680473,32.342701,5359900
+2000-04-03,40.680473,41.296844,40.495564,41.235207,32.783745,4872100
+2000-04-04,41.235207,41.358482,37.845169,40.557198,32.244694,7052100
+2000-04-05,40.187378,40.187378,38.708088,39.447731,31.362629,5141700
+2000-04-06,39.447731,40.680473,39.447731,39.879192,31.705645,3616600
+2000-04-07,39.940830,40.803749,39.940830,40.433926,32.146687,3299000
+2000-04-10,40.433926,40.803749,39.694279,40.742111,32.391708,3666700
+2000-04-11,41.173569,41.913216,41.173569,41.913216,33.322781,6925500
+2000-04-12,41.913216,43.022682,39.447731,39.509369,31.411633,8416900
+2000-04-13,39.509369,40.187378,38.708088,38.708088,30.774578,5080000
+2000-04-14,38.708088,38.954636,37.167160,38.338264,30.480549,8250800
+2000-04-17,38.214989,38.214989,36.550789,38.214989,30.382536,6539200
+2000-04-18,38.214989,39.201183,37.475346,38.954636,30.970592,4276800
+2000-04-19,39.447731,40.927021,39.447731,40.372288,32.097683,8581800
+2000-04-20,40.372288,41.913216,40.249012,41.666668,33.126770,5440500
+2000-04-24,40.865383,40.865383,39.571007,39.694279,31.558641,3845300
+2000-04-25,39.694279,40.002464,39.139545,39.940830,31.754665,3750500
+2000-04-26,40.187378,41.913216,40.187378,41.789940,33.224777,6004500
+2000-04-27,41.666668,41.666668,40.310650,41.050297,32.636734,4676200
+2000-04-28,41.666668,43.022682,41.666668,43.022682,34.204861,11734900
+2000-05-01,43.022682,43.269230,41.420120,41.420120,32.930759,8921900
+2000-05-02,41.420120,42.467949,39.755917,42.406311,33.714821,9299400
+2000-05-03,41.420120,41.420120,39.077908,40.680473,32.342701,6515700
+2000-05-04,40.680473,41.358482,38.831360,39.139545,31.117609,7129600
+2000-05-05,39.139545,39.879192,38.708088,39.016273,31.019594,3472400
+2000-05-08,39.016273,40.557198,39.016273,40.002464,31.803656,3583300
+2000-05-09,40.002464,41.050297,39.201183,40.187378,31.950674,3408100
+2000-05-10,40.187378,41.173569,39.940830,40.495564,32.195698,6371800
+2000-05-11,40.495564,40.803749,40.002464,40.557198,32.244694,3335500
+2000-05-12,40.557198,40.803749,39.509369,40.249012,31.999668,3105500
+2000-05-15,40.249012,40.557198,39.632645,40.249012,31.999668,3893200
+2000-05-16,40.680473,42.036488,40.680473,41.605030,33.077766,5940600
+2000-05-17,41.605030,42.283039,41.481754,41.666668,33.126770,4091000
+2000-05-18,41.666668,42.221401,40.433926,40.618835,32.293701,4394900
+2000-05-19,40.618835,40.680473,39.201183,40.495564,32.195698,4462700
+2000-05-22,40.495564,40.927021,39.447731,40.310650,32.048683,4201800
+2000-05-23,40.125740,40.125740,39.201183,39.509369,31.411633,4225700
+2000-05-24,39.509369,40.249012,37.660255,38.769722,30.823582,4407200
+2000-05-25,38.769722,39.879192,38.523174,39.262821,31.215612,3920200
+2000-05-26,39.262821,39.632645,38.708088,39.324459,31.264616,3369200
+2000-05-30,40.064102,41.420120,40.064102,41.296844,32.832748,7349000
+2000-05-31,41.296844,41.851578,41.050297,41.605030,33.077766,5182500
+2000-06-01,41.605030,41.974854,40.557198,41.173569,32.734741,3899700
+2000-06-02,41.173569,41.420120,39.694279,39.879192,31.705645,5838700
+2000-06-05,39.879192,40.433926,39.447731,40.064102,31.852661,4220200
+2000-06-06,40.064102,40.988659,39.879192,40.310650,32.048683,3843100
+2000-06-07,40.310650,40.372288,39.571007,39.632645,31.509638,3374600
+2000-06-08,39.632645,40.557198,39.571007,39.879192,31.705645,3074300
+2000-06-09,39.940830,41.420120,39.940830,40.988659,32.587723,4018700
+2000-06-12,40.988659,41.111935,39.817554,40.156509,31.926136,3475500
+2000-06-13,40.156509,40.927021,39.632645,39.632645,31.509638,5001900
+2000-06-14,39.879192,40.495564,39.879192,40.187378,31.950674,2942000
+2000-06-15,40.310650,41.420120,40.310650,41.420120,32.930759,4223500
+2000-06-16,41.420120,41.666668,40.495564,40.618835,32.293701,4975000
+2000-06-19,40.618835,41.420120,40.557198,41.358482,32.881752,2712800
+2000-06-20,41.358482,41.481754,40.988659,41.173569,32.734741,3296900
+2000-06-21,41.173569,41.481754,40.927021,41.173569,32.734741,2933100
+2000-06-22,41.173569,41.235207,40.495564,40.927021,32.538723,2865200
+2000-06-23,40.742111,40.742111,38.831360,39.077908,31.068594,5520800
+2000-06-26,39.077908,39.940830,39.016273,39.940830,31.754665,3823100
+2000-06-27,39.940830,39.940830,39.016273,39.447731,31.362629,3377800
+2000-06-28,39.447731,39.632645,38.091717,38.615582,30.701033,3166300
+2000-06-29,38.615582,39.571007,37.536983,38.954636,30.970592,4803200
+2000-06-30,38.892998,38.892998,37.228798,38.276627,30.431547,6612500
+2000-07-03,38.153355,38.153355,37.475346,38.030079,30.235525,2324200
+2000-07-05,38.030079,38.338264,37.352070,37.536983,29.843496,4212600
+2000-07-06,37.536983,38.646450,36.920612,36.982250,29.402462,3874200
+2000-07-07,36.982250,37.598618,35.996056,37.475346,29.794489,9326800
+2000-07-10,37.475346,37.906803,36.920612,37.290436,29.647482,4126400
+2000-07-11,37.228798,37.228798,35.502960,35.502960,28.226366,3836100
+2000-07-12,35.564594,37.598618,35.564594,35.564594,28.275366,4154500
+2000-07-13,35.749508,37.167160,35.749508,36.612427,29.108433,5092800
+2000-07-14,36.612427,36.982250,35.749508,36.920612,29.353456,4111700
+2000-07-17,36.920612,36.920612,35.626232,36.057693,28.667393,4455300
+2000-07-18,35.687870,35.687870,34.886589,35.256409,28.030340,4698800
+2000-07-19,35.934418,37.043884,35.934418,36.489151,29.010420,5999500
+2000-07-20,36.982250,38.769722,36.982250,38.091717,30.284536,5474500
+2000-07-21,38.091717,38.153355,36.674065,36.920612,29.353456,3784500
+2000-07-24,36.735699,36.735699,36.057693,36.119328,28.716400,2981500
+2000-07-25,36.119328,37.105522,35.811146,36.982250,29.402462,3742500
+2000-07-26,36.982250,37.721893,36.550789,37.536983,29.843496,4710400
+2000-07-27,37.536983,37.968441,37.043884,37.475346,29.794489,2734800
+2000-07-28,37.475346,37.660255,36.674065,37.352070,29.696484,2514000
+2000-07-31,37.475346,38.214989,37.475346,38.030079,30.235525,4193500
+2000-08-01,38.461540,39.755917,38.461540,38.954636,30.970592,8354900
+2000-08-02,39.016273,39.632645,39.016273,39.571007,31.460640,4613400
+2000-08-03,39.571007,42.406311,38.892998,41.913216,33.322781,4453000
+2000-08-04,41.913216,42.221401,40.372288,41.851578,33.273781,14963400
+2000-08-07,41.420120,41.420120,40.002464,40.495564,32.195698,6139500
+2000-08-08,40.803749,41.543392,40.803749,40.803749,32.440716,5026700
+2000-08-09,40.803749,41.173569,39.755917,40.125740,31.901669,3654900
+2000-08-10,40.125740,40.187378,39.016273,39.940830,31.754665,3670500
+2000-08-11,39.940830,40.803749,39.817554,40.064102,31.852661,4938100
+2000-08-14,40.064102,40.433926,39.817554,40.372288,32.097683,2839000
+2000-08-15,40.372288,40.557198,39.447731,39.940830,31.754665,2918000
+2000-08-16,39.817554,39.817554,38.708088,39.324459,31.264616,2646700
+2000-08-17,39.324459,39.632645,38.769722,39.632645,31.509638,2357900
+2000-08-18,39.447731,39.447731,38.214989,38.584812,30.676567,4546300
+2000-08-21,38.584812,38.708088,37.598618,37.906803,30.137512,3069900
+2000-08-22,37.906803,37.906803,37.167160,37.413708,29.745487,3706300
+2000-08-23,37.413708,37.845169,36.920612,36.920612,29.353456,3305700
+2000-08-24,37.105522,37.845169,37.105522,37.352070,29.696484,5506100
+2000-08-25,37.475346,37.845169,37.475346,37.598618,29.892492,2228200
+2000-08-28,37.598618,37.906803,37.228798,37.536983,29.843496,3044200
+2000-08-29,37.536983,37.968441,37.105522,37.906803,30.137512,3610500
+2000-08-30,37.906803,38.338264,37.598618,37.845169,30.088518,3231600
+2000-08-31,37.845169,39.262821,37.783531,38.415287,30.541775,4499700
+2000-09-01,38.415287,38.831360,37.598618,38.461540,30.578562,1997800
+2000-09-05,38.461540,38.892998,38.030079,38.091717,30.284536,3263200
+2000-09-06,38.276627,39.447731,38.276627,39.447731,31.362629,4413800
+2000-09-07,39.447731,40.927021,39.077908,40.927021,32.538723,4185000
+2000-09-08,40.125740,40.125740,39.139545,39.694279,31.558641,3329400
+2000-09-11,39.632645,39.632645,38.646450,39.386093,31.313620,2501800
+2000-09-12,39.201183,39.201183,38.030079,38.091717,30.284536,3296000
+2000-09-13,38.276627,39.447731,38.276627,39.447731,31.362629,3565600
+2000-09-14,38.892998,38.892998,37.968441,38.831360,30.872587,3106100
+2000-09-15,38.831360,39.262821,38.276627,38.646450,30.725565,5009500
+2000-09-18,38.523174,38.523174,37.475346,37.906803,30.137512,2674400
+2000-09-19,37.906803,37.968441,36.982250,37.783531,30.039516,3024500
+2000-09-20,37.783531,37.906803,36.489151,36.674065,29.157436,4412400
+2000-09-21,36.674065,37.228798,36.489151,36.612427,29.108433,3112400
+2000-09-22,36.612427,37.290436,36.304241,36.920612,29.353456,3800000
+2000-09-25,36.920612,37.598618,36.797337,37.043884,29.451466,2628200
+2000-09-26,37.043884,37.660255,36.982250,37.167160,29.549467,3173400
+2000-09-27,37.167160,37.845169,36.982250,37.043884,29.451466,2634700
+2000-09-28,37.228798,38.892998,37.228798,38.276627,30.431547,4138800
+2000-09-29,38.276627,38.646450,37.413708,37.721893,29.990503,4847500
+2000-10-02,37.968441,39.324459,37.968441,38.954636,30.970592,3631000
+2000-10-03,38.954636,39.571007,38.708088,38.769722,30.823582,4725000
+2000-10-04,39.077908,39.632645,39.077908,39.324459,31.264616,4144900
+2000-10-05,39.447731,40.927021,39.447731,40.680473,32.342701,5606000
+2000-10-06,40.680473,40.742111,39.324459,40.125740,31.901669,4214400
+2000-10-09,40.125740,40.495564,40.002464,40.372288,32.097683,3033600
+2000-10-10,40.557198,41.358482,40.557198,40.803749,32.440716,4951000
+2000-10-11,40.803749,40.988659,39.509369,39.755917,31.607643,5268000
+2000-10-12,39.694279,39.694279,38.214989,38.461540,30.578562,5240500
+2000-10-13,38.461540,39.509369,37.536983,39.201183,31.166597,5814300
+2000-10-16,38.584812,38.584812,36.982250,37.721893,29.990503,5117700
+2000-10-17,37.352070,37.352070,33.777119,34.948223,27.785318,8390500
+2000-10-18,34.948223,35.441322,33.037476,35.009861,27.834328,9476400
+2000-10-19,35.009861,35.502960,33.962032,34.455128,27.393288,5557300
+2000-10-20,34.455128,36.674065,33.838757,35.872780,28.520386,6060600
+2000-10-23,35.872780,36.180965,35.256409,35.687870,28.373375,3606800
+2000-10-24,35.687870,36.304241,35.071499,35.687870,28.373375,4141300
+2000-10-25,35.194775,35.194775,33.838757,34.331856,27.295284,3746400
+2000-10-26,34.331856,35.009861,34.085304,34.640041,27.540310,5495100
+2000-10-27,34.331856,34.331856,33.407299,33.838757,26.903248,4568000
+2000-10-30,33.838757,35.811146,33.345661,34.393490,27.344290,4316900
+2000-10-31,34.393490,35.379684,33.592209,35.318047,28.079348,5290400
+2000-11-01,35.318047,36.489151,34.331856,36.119328,28.716400,5137500
+2000-11-02,36.119328,37.105522,36.119328,36.797337,29.255447,4407700
+2000-11-03,36.797337,37.721893,36.489151,37.536983,29.843496,4423800
+2000-11-06,37.536983,37.598618,36.489151,36.797337,29.255447,2930300
+2000-11-07,36.674065,36.674065,35.996056,36.612427,29.108433,2617700
+2000-11-08,36.612427,37.475346,36.304241,36.365879,28.912424,3871200
+2000-11-09,33.037476,33.037476,30.387081,30.695267,24.404043,25803200
+2000-11-10,30.756903,31.743097,30.756903,31.250000,24.845079,13465800
+2000-11-13,30.880178,30.880178,29.585798,30.017258,23.864998,11861200
+2000-11-14,30.202169,31.311638,30.202169,30.510355,24.257029,7009100
+2000-11-15,30.695267,32.051281,30.695267,31.373274,24.943090,6093000
+2000-11-16,31.373274,31.681459,29.955622,30.448717,24.208025,4127100
+2000-11-17,30.448717,30.756903,29.277613,29.770710,23.668976,6242200
+2000-11-20,29.770710,30.078896,29.154339,29.462524,23.423954,4917100
+2000-11-21,29.092703,29.092703,28.168146,28.969427,23.031923,5936700
+2000-11-22,28.907791,28.907791,28.106508,28.661243,22.786903,4975300
+2000-11-24,28.661243,29.524162,28.168146,29.339251,23.325953,3158100
+2000-11-27,29.339251,30.140533,29.215977,29.524162,23.472965,4939000
+2000-11-28,29.524162,30.017258,29.154339,29.462524,23.423954,4969600
+2000-11-29,29.462524,29.462524,27.983234,28.414694,22.590887,5102000
+2000-11-30,28.414694,28.784517,27.490139,28.537968,22.688896,5965300
+2000-12-01,28.722879,30.325443,28.722879,29.585798,23.521967,5581700
+2000-12-04,29.462524,29.462524,28.784517,29.031065,23.080933,4153200
+2000-12-05,29.031065,30.695267,28.661243,30.263807,24.061012,5059100
+2000-12-06,30.695267,31.558186,30.695267,31.003452,24.818899,5315000
+2000-12-07,31.003452,31.003452,29.647436,30.202169,24.177458,4135300
+2000-12-08,30.202169,30.880178,30.017258,30.510355,24.424168,2897200
+2000-12-11,30.510355,30.633629,29.832348,29.955622,23.980095,3239300
+2000-12-12,29.955622,30.140533,29.339251,29.893984,23.930754,5853500
+2000-12-13,29.893984,30.263807,29.215977,29.339251,23.486673,5115400
+2000-12-14,29.339251,29.462524,28.229782,28.599606,22.894571,7115200
+2000-12-15,28.599606,29.339251,27.859961,28.969427,23.190619,9259500
+2000-12-18,28.969427,29.524162,28.414694,28.907791,23.141281,5142100
+2000-12-19,28.907791,29.215977,28.661243,28.722879,22.993258,5938000
+2000-12-20,27.366863,27.366863,26.442308,26.688856,21.364977,9134600
+2000-12-21,26.688856,27.305227,26.134123,27.305227,21.858397,6425700
+2000-12-22,27.058678,27.058678,25.641026,26.072485,20.871561,7757800
+2000-12-26,26.380671,26.750494,26.380671,26.565582,21.266296,4884300
+2000-12-27,26.565582,28.168146,26.565582,27.921598,22.351818,8591000
+2000-12-28,27.921598,28.476332,27.243589,28.353058,22.697205,4715200
+2000-12-29,28.353058,28.722879,28.044872,28.537968,22.845230,4629300
+2001-01-02,28.044872,28.044872,27.243589,27.551775,22.055763,6547200
+2001-01-03,27.613413,30.941814,27.613413,30.633629,24.522852,10282100
+2001-01-04,30.633629,32.975838,30.448717,31.496548,25.213638,10503500
+2001-01-05,31.496548,32.174557,30.880178,31.065088,24.868240,6499400
+2001-01-08,31.065088,31.126726,29.154339,29.709072,23.782726,5068500
+2001-01-09,29.709072,30.571993,29.462524,29.709072,23.782726,4770200
+2001-01-10,29.709072,30.140533,29.462524,29.955622,23.980095,3363000
+2001-01-11,29.955622,30.941814,28.907791,29.092703,23.289309,5961500
+2001-01-12,29.339251,31.434912,29.339251,31.126726,24.917582,8355300
+2001-01-16,31.373274,32.790928,31.373274,32.606014,26.101780,6506700
+2001-01-17,32.606014,33.530571,32.112919,32.914200,26.348495,6343700
+2001-01-18,32.914200,32.975838,31.619822,32.667652,26.151123,4831800
+2001-01-19,32.667652,32.975838,31.928007,32.236195,25.805740,5079300
+2001-01-22,30.880178,30.880178,30.078896,30.695267,24.572193,6231200
+2001-01-23,30.695267,30.818541,29.462524,29.585798,23.684036,8185400
+2001-01-24,29.585798,30.633629,29.462524,30.078896,24.078773,8368500
+2001-01-25,30.078896,30.325443,29.585798,29.647436,23.733383,5868100
+2001-01-26,29.524162,29.524162,28.907791,29.400888,23.536016,9087700
+2001-01-29,29.161736,30.571993,29.161736,30.394478,24.331406,5216500
+2001-01-30,30.394478,30.867849,29.536489,30.473373,24.394564,4833800
+2001-01-31,30.473373,30.493097,29.911243,30.029585,24.039307,5117100
+2001-02-01,30.029585,31.262327,29.842209,31.065088,24.868240,5896700
+2001-02-02,30.818541,30.818541,30.128204,30.325443,24.276140,5302200
+2001-02-05,30.325443,30.374754,29.398422,30.009861,24.023518,6055700
+2001-02-06,30.571993,31.656805,30.571993,31.173571,24.955086,9265100
+2001-02-07,31.173571,32.396450,30.571993,30.571993,24.473513,8182700
+2001-02-08,30.650888,32.051281,30.650888,31.686390,25.365606,7137700
+2001-02-09,31.686390,31.686390,30.818541,31.508875,25.223503,4972800
+2001-02-12,31.508875,32.524654,31.429979,32.061146,25.665607,5409600
+2001-02-13,32.061146,32.445759,30.867849,31.065088,24.868240,6163500
+2001-02-14,31.065088,31.469427,30.325443,30.719921,24.591927,5479400
+2001-02-15,30.818541,32.051281,30.818541,31.962524,25.586658,4729200
+2001-02-16,31.962524,32.514793,30.818541,31.568047,25.270872,4745500
+2001-02-20,31.568047,31.656805,30.700197,30.719921,24.591927,4909500
+2001-02-21,30.719921,31.558186,30.670612,31.074951,24.876135,3859700
+2001-02-22,31.074951,31.262327,30.177515,30.571993,24.473513,6013100
+2001-02-23,30.571993,30.591717,29.191322,29.881657,23.920881,5776400
+2001-02-26,29.881657,30.966469,29.487179,30.966469,24.789291,4356100
+2001-02-27,30.690336,30.690336,29.694281,30.325443,24.276140,3990100
+2001-02-28,30.325443,30.808678,29.842209,30.522682,24.434034,3659900
+2001-03-01,30.522682,30.571993,29.506903,29.921104,23.952457,6343700
+2001-03-02,29.783037,29.783037,28.284023,29.575937,23.676146,6722400
+2001-03-05,29.575937,29.585798,28.895464,29.122288,23.312988,4324800
+2001-03-06,29.122288,29.487179,28.698225,29.319527,23.470879,7377100
+2001-03-07,29.487179,29.980276,29.487179,29.566074,23.668251,6149600
+2001-03-08,29.566074,29.950691,29.339251,29.437870,23.565622,4977200
+2001-03-09,29.437870,29.753452,28.994083,29.309664,23.462988,3594700
+2001-03-12,28.984221,28.984221,26.627218,27.130178,21.718264,7220100
+2001-03-13,27.130178,27.790928,26.548323,27.633137,22.120893,8569700
+2001-03-14,27.633137,27.662722,25.936884,26.715977,21.386694,6016500
+2001-03-15,26.715977,27.731756,26.627218,27.613413,22.105104,5261700
+2001-03-16,27.603550,27.603550,26.686390,27.140039,21.726162,8243500
+2001-03-19,27.140039,27.810652,26.932940,27.623274,22.112997,3869700
+2001-03-20,27.623274,27.840237,26.272190,26.538462,21.244587,4355700
+2001-03-21,26.538462,27.514793,26.104536,27.001972,21.615635,7498600
+2001-03-22,27.001972,27.120316,25.700197,26.597633,21.291952,9120200
+2001-03-23,26.597633,27.603550,26.183432,27.435898,21.962999,7514800
+2001-03-26,27.435898,28.106508,26.873768,27.534517,22.041946,5302700
+2001-03-27,27.534517,29.349112,27.495070,28.796844,23.052464,8223600
+2001-03-28,28.116371,28.116371,27.110455,27.968441,22.389315,9152100
+2001-03-29,27.968441,28.076923,26.972387,27.712032,22.184053,3530000
+2001-03-30,27.712032,28.451677,27.248520,28.205128,22.578781,3838300
+2001-04-02,28.205128,28.570021,27.504930,27.583826,22.081421,3989900
+2001-04-03,27.347139,27.347139,26.104536,26.725838,21.394583,7321800
+2001-04-04,26.725838,27.120316,26.479290,26.725838,21.394583,6382300
+2001-04-05,26.794872,27.899408,26.794872,27.692308,22.168262,5385600
+2001-04-06,27.692308,28.096647,27.189348,28.096647,22.491943,4026200
+2001-04-09,28.096647,28.500986,27.662722,28.205128,22.578781,3350700
+2001-04-10,28.264299,29.230770,28.264299,28.846153,23.091938,4738700
+2001-04-11,28.846153,28.846153,27.928993,28.234715,22.602468,4506600
+2001-04-12,28.234715,28.786982,27.810652,28.688362,22.965626,3113900
+2001-04-16,28.688362,29.240631,28.510849,28.757397,23.020885,3462800
+2001-04-17,28.757397,29.585798,28.431953,29.585798,23.684036,5101400
+2001-04-18,29.585798,31.538462,29.289940,31.459566,25.184029,7606600
+2001-04-19,31.459566,31.558186,30.779093,31.439842,25.168242,5265000
+2001-04-20,31.232742,31.232742,30.088757,30.927021,24.757715,5264200
+2001-04-23,30.927021,30.927021,29.487179,29.615385,23.707722,5708900
+2001-04-24,29.575937,29.575937,28.185404,28.185404,22.562994,6283100
+2001-04-25,28.451677,30.512821,28.451677,30.384615,24.323511,9161000
+2001-04-26,30.571993,31.528599,30.571993,31.084812,24.884027,6731200
+2001-04-27,31.084812,31.459566,30.078896,30.502958,24.418245,4786600
+2001-04-30,30.502958,31.015779,29.832348,29.832348,23.881409,3925100
+2001-05-01,29.861933,31.065088,29.861933,30.128204,24.118248,4181700
+2001-05-02,30.276134,31.410257,30.276134,30.818541,24.670876,3779200
+2001-05-03,30.818541,30.966469,30.147928,30.532545,24.441929,4090800
+2001-05-04,30.532545,30.571993,29.881657,30.483234,24.402456,2958700
+2001-05-07,30.483234,30.739645,29.930967,30.562130,24.465614,3061600
+2001-05-08,30.552269,30.552269,29.940828,30.138067,24.126148,2679700
+2001-05-09,29.911243,29.911243,29.437870,29.437870,23.565622,3531000
+2001-05-10,29.812624,30.818541,29.812624,30.650888,24.536667,5180900
+2001-05-11,30.650888,31.262327,30.443787,30.838264,24.686666,4336100
+2001-05-14,30.838264,31.242603,30.325443,30.522682,24.434034,2825100
+2001-05-15,30.522682,30.966469,30.138067,30.670612,24.552454,4713500
+2001-05-16,30.670612,32.051281,29.881657,31.617357,25.310345,7952900
+2001-05-17,31.617357,32.790928,31.558186,32.061146,25.665607,9521600
+2001-05-18,32.061146,32.228798,31.676529,32.149902,25.736658,8058000
+2001-05-21,33.037476,34.132149,33.037476,33.806705,27.062962,10708300
+2001-05-22,33.806705,34.319527,33.195267,34.023670,27.236647,6936000
+2001-05-23,33.885601,33.885601,32.642998,32.652859,26.139284,6142500
+2001-05-24,32.652859,33.461540,32.642998,32.840237,26.289284,6039100
+2001-05-25,32.554241,32.554241,31.715977,32.189350,25.768234,6431400
+2001-05-29,32.189350,32.396450,31.558186,32.169624,25.752447,4458900
+2001-05-30,32.001972,32.001972,31.213018,31.341223,25.089294,5695700
+2001-05-31,31.341223,31.607494,30.907297,31.183432,24.962978,5027300
+2001-06-01,31.183432,31.360947,30.759369,31.282051,25.041925,3543200
+2001-06-04,31.282051,31.311638,30.779093,31.183432,24.962978,2927900
+2001-06-05,31.183432,31.193295,30.700197,30.917160,24.749821,3702000
+2001-06-06,30.927021,31.439842,30.927021,31.163708,24.947193,4086400
+2001-06-07,31.163708,31.962524,31.104536,31.706114,25.381399,4910400
+2001-06-08,31.706114,31.804733,30.956608,31.410257,25.144556,2949500
+2001-06-11,31.311638,31.311638,30.641026,31.074951,24.876135,3152600
+2001-06-12,31.074951,31.291914,30.256411,31.055227,24.860348,3524500
+2001-06-13,30.996056,30.996056,30.078896,30.374754,24.315619,3503500
+2001-06-14,30.374754,30.394478,29.240631,29.447731,23.573511,4399200
+2001-06-15,29.388559,29.388559,28.856016,29.289940,23.447199,6665200
+2001-06-18,29.142012,29.142012,28.500986,28.629190,22.918255,6176700
+2001-06-19,28.629190,29.191322,28.441814,28.451677,22.776154,5342300
+2001-06-20,28.451677,29.132151,27.909269,28.846153,23.091938,6076200
+2001-06-21,28.846153,29.871796,28.540434,29.684418,23.762987,4578700
+2001-06-22,29.585798,29.585798,28.353058,28.786982,23.044569,6097600
+2001-06-25,28.786982,28.786982,27.899408,28.589743,22.886675,5971400
+2001-06-26,28.579882,28.579882,27.564102,27.859961,22.302475,6468800
+2001-06-27,27.859961,27.978304,26.972387,27.761341,22.223524,8035700
+2001-06-28,27.761341,28.491125,27.712032,27.948717,22.373522,5154600
+2001-06-29,27.948717,28.589743,27.376726,28.491125,22.807730,5455400
+2001-07-02,28.491125,28.619329,27.682446,28.343195,22.689312,4311000
+2001-07-03,28.333334,28.333334,27.859961,27.958580,22.381418,2849700
+2001-07-05,27.958580,28.274162,27.712032,27.800789,22.255102,3559500
+2001-07-06,27.731756,27.731756,26.893492,27.327415,21.876158,4530100
+2001-07-09,27.327415,27.761341,27.031559,27.307692,21.860367,3621000
+2001-07-10,27.307692,27.416174,26.637081,26.725838,21.394583,4020600
+2001-07-11,26.725838,27.810652,26.725838,27.613413,22.105104,8328100
+2001-07-12,27.613413,27.702169,26.439842,27.238659,21.805107,7300200
+2001-07-13,27.238659,27.909269,27.149900,27.800789,22.255102,8041500
+2001-07-16,27.800789,28.224852,27.100592,27.218935,21.789316,5783900
+2001-07-17,27.218935,27.909269,27.041420,27.909269,22.341946,5352800
+2001-07-18,27.366863,27.366863,26.528599,26.637081,21.323528,5847500
+2001-07-19,26.637081,27.366863,26.400394,26.775148,21.434057,6126100
+2001-07-20,26.775148,27.021696,26.459566,26.627218,21.315638,4468700
+2001-07-23,26.627218,26.903353,26.390533,26.508875,21.220905,4681300
+2001-07-24,26.508875,26.508875,25.670612,25.788954,20.644587,5227400
+2001-07-25,25.788954,26.282051,25.710060,26.104536,20.897217,6286900
+2001-07-26,26.104536,26.725838,25.936884,26.528599,21.236687,9374700
+2001-07-27,26.528599,26.972387,26.459566,26.656805,21.339315,4069000
+2001-07-30,26.893492,27.021696,26.035503,26.232742,20.999853,6550100
+2001-07-31,26.252466,26.469427,25.818541,25.986193,20.802483,7957600
+2001-08-01,26.183432,26.489151,25.897436,26.301775,21.055115,6649900
+2001-08-02,26.676529,26.794872,26.035503,26.134123,20.920904,7910100
+2001-08-03,26.725838,27.120316,26.232742,26.232742,20.999853,9636400
+2001-08-06,26.331362,26.646942,26.252466,26.459566,21.181423,5338600
+2001-08-07,26.459566,27.268244,26.390533,26.923077,21.552471,6088800
+2001-08-08,26.893492,27.011835,26.439842,26.646942,21.331425,5407100
+2001-08-09,26.646942,27.593689,26.429979,26.863905,21.505110,5466000
+2001-08-10,26.676529,27.140039,26.499014,27.021696,21.631422,4687000
+2001-08-13,27.021696,27.209072,26.775148,26.863905,21.505110,3522300
+2001-08-14,26.863905,27.100592,26.637081,26.706114,21.378796,3734900
+2001-08-15,26.627218,26.903353,26.193295,26.252466,21.015640,4949300
+2001-08-16,26.134123,26.183432,25.788954,26.084812,20.881430,7467900
+2001-08-17,26.084812,26.331362,24.753452,25.295858,20.249855,6879200
+2001-08-20,25.345167,25.621302,25.147928,25.542406,20.447222,4233700
+2001-08-21,25.621302,25.976332,25.295858,25.443787,20.368271,4128200
+2001-08-22,25.443787,26.025640,25.266272,25.749506,20.613007,3580300
+2001-08-23,25.542406,25.946745,25.522682,25.788954,20.644587,2966600
+2001-08-24,25.808678,26.479290,25.690336,26.282051,21.039326,3811100
+2001-08-27,26.429979,26.558186,26.025640,26.035503,20.841959,3277200
+2001-08-28,26.035503,26.262327,25.295858,25.404339,20.336695,3234800
+2001-08-29,25.522682,25.522682,25.000000,25.059172,20.060381,3425600
+2001-08-30,25.009861,25.295858,24.082840,24.289940,19.444599,5246300
+2001-08-31,24.220907,25.167652,24.220907,25.078896,20.076170,4095200
+2001-09-04,24.753452,25.690336,24.654833,25.355030,20.297224,5391800
+2001-09-05,25.108480,25.483234,24.753452,25.009861,20.020910,4707000
+2001-09-06,24.674557,25.049309,23.895464,24.151875,19.334068,6543200
+2001-09-07,23.767258,24.063116,22.830376,23.777121,19.034071,8816300
+2001-09-10,23.175543,23.658777,22.928993,23.254438,18.615654,7359900
+2001-09-17,18.737673,20.710060,18.589743,18.984221,15.197260,46880900
+2001-09-18,19.714005,19.723866,17.504930,18.145956,14.526209,23462700
+2001-09-19,17.998028,18.441814,16.577909,18.244576,14.605157,25414200
+2001-09-20,17.652861,18.076923,15.285996,16.745562,13.405167,61922500
+2001-09-21,16.272190,17.948717,15.798817,17.623274,14.107793,44331700
+2001-09-24,17.751480,18.244576,17.268244,17.652861,14.131481,24317600
+2001-09-25,17.642998,17.712032,17.011835,17.218935,13.784116,16903500
+2001-09-26,17.652861,17.662722,17.061144,17.199211,13.768324,14171700
+2001-09-27,17.011835,17.544378,16.893492,17.307692,13.855164,7825000
+2001-09-28,17.751480,18.540434,17.514793,18.362919,14.699892,12820300
+2001-10-01,18.362919,18.658777,17.771204,17.771204,14.226214,7874600
+2001-10-02,17.899408,18.737673,17.850100,18.737673,14.999894,9596100
+2001-10-03,18.737673,19.773176,18.254438,19.635109,15.718305,13102200
+2001-10-04,19.704142,19.950691,19.033531,19.033531,15.236732,13181700
+2001-10-05,19.467455,19.477318,18.550297,18.934912,15.157787,9444900
+2001-10-08,18.934912,19.043392,18.155819,18.451677,14.770946,6045800
+2001-10-09,18.293886,18.836292,18.244576,18.836292,15.078839,6615200
+2001-10-10,18.540434,19.211046,18.382643,19.033531,15.236732,6647900
+2001-10-11,19.329388,20.207100,19.289940,19.575937,15.670941,13608800
+2001-10-12,19.132151,19.615385,18.836292,19.339251,15.481467,8589800
+2001-10-15,18.806705,19.112427,18.639053,18.885601,15.118313,5497100
+2001-10-16,19.132151,19.319527,18.589743,18.895464,15.126208,6480600
+2001-10-17,19.467455,19.625246,18.155819,18.244576,14.605157,9044900
+2001-10-18,18.244576,18.264299,17.564102,17.652861,14.131481,6891100
+2001-10-19,17.751480,18.234715,17.652861,18.234715,14.597262,6915200
+2001-10-22,18.007889,18.402367,17.859961,18.293886,14.644630,8646000
+2001-10-23,18.589743,18.688362,18.145956,18.264299,14.620946,6827700
+2001-10-24,18.293886,18.589743,18.096647,18.185404,14.557790,5265000
+2001-10-25,17.781065,18.836292,17.751480,18.757397,15.015678,9181000
+2001-10-26,18.491125,19.122288,18.284023,18.451677,14.770946,5992900
+2001-10-29,18.441814,18.540434,17.899408,17.998028,14.407791,5997200
+2001-10-30,18.096647,18.540434,17.761341,17.909269,14.336737,7852100
+2001-10-31,18.195267,18.688362,18.047337,18.333334,14.676211,5918600
+2001-11-01,18.382643,18.826429,18.205128,18.570021,14.865683,5533300
+2001-11-02,18.786982,18.836292,18.441814,18.609467,14.897261,6932700
+2001-11-05,18.984221,19.230770,18.737673,18.895464,15.126208,7300100
+2001-11-06,18.392506,18.491125,18.096647,18.491125,14.802522,11329000
+2001-11-07,18.244576,18.323471,18.027613,18.205128,14.573580,9951200
+2001-11-08,18.441814,19.280079,18.293886,18.579882,14.873575,8402800
+2001-11-09,17.948717,18.836292,17.800789,18.688362,14.960415,11353800
+2001-11-12,18.737673,18.737673,17.948717,18.451677,14.770946,16022800
+2001-11-13,18.688362,19.211046,18.648914,18.836292,15.078839,11781000
+2001-11-14,19.280079,19.575937,18.786982,19.053255,15.252525,15735900
+2001-11-15,18.984221,20.108480,18.737673,20.019724,16.026201,14158700
+2001-11-16,20.216963,21.055227,20.118343,20.473373,16.389355,13953000
+2001-11-19,20.956608,21.321499,20.729782,21.203156,16.973560,17448600
+2001-11-20,21.153847,21.607494,20.956608,20.976332,16.791983,11901100
+2001-11-21,20.986193,21.025640,20.128204,20.562130,16.460409,6354500
+2001-11-23,20.956608,20.996056,20.562130,20.710060,16.578829,2975900
+2001-11-26,20.857988,21.203156,20.719921,21.094675,16.886721,5086200
+2001-11-27,20.710060,21.005917,20.512821,20.779093,16.634090,7922300
+2001-11-28,20.710060,20.710060,19.881657,19.930967,15.955148,6047600
+2001-11-29,20.177515,20.207100,19.477318,19.980276,15.994621,7257900
+2001-11-30,20.236687,20.463511,19.871796,20.187376,16.160406,6916700
+2001-12-03,20.187376,20.187376,19.526627,19.694281,15.765678,7024700
+2001-12-04,19.822485,19.871796,19.230770,19.723866,15.789358,8424200
+2001-12-05,20.216963,21.597633,20.098619,21.351086,17.273355,13078200
+2001-12-06,21.597633,22.307692,21.203156,22.228796,17.983431,12061600
+2001-12-07,22.189348,22.583826,21.883629,22.041420,17.831846,8549300
+2001-12-10,21.696253,22.041420,21.005917,21.331362,17.257397,7550400
+2001-12-11,21.439842,22.337278,21.351086,22.090731,17.871733,10319500
+2001-12-12,22.090731,22.278107,21.706114,21.932940,17.744083,7322900
+2001-12-13,21.696253,21.863905,21.005917,21.301775,17.233456,8164100
+2001-12-14,21.005917,21.193295,20.088757,20.680473,16.730816,9194300
+2001-12-17,20.956608,20.996056,20.345167,20.502958,16.587206,6159300
+2001-12-18,20.710060,20.936884,20.414202,20.680473,16.730816,12775900
+2001-12-19,20.611441,20.611441,20.128204,20.414202,16.515400,9298700
+2001-12-20,20.424063,20.660749,20.226824,20.266272,16.395721,6939900
+2001-12-21,20.463511,20.502958,20.216963,20.394478,16.499441,12469700
+2001-12-24,20.660749,20.660749,20.157791,20.364891,16.475504,2786000
+2001-12-26,20.216963,20.502958,20.216963,20.315582,16.435617,4272900
+2001-12-27,20.463511,20.710060,20.276134,20.710060,16.754751,6159400
+2001-12-28,20.710060,20.857988,20.522682,20.660749,16.714859,7472000
+2001-12-31,20.660749,20.956608,20.266272,20.433926,16.531359,7565000
+2002-01-02,20.611441,21.213018,20.542406,21.153847,17.113781,8714100
+2002-01-03,21.794872,21.893492,21.597633,21.814596,17.648336,12444600
+2002-01-04,22.169624,22.435898,21.913216,22.386587,18.111092,9730900
+2002-01-07,22.633137,23.274162,22.495070,22.928993,18.549902,13701900
+2002-01-08,22.928993,22.928993,22.337278,22.475346,18.182896,6349000
+2002-01-09,22.554241,22.573965,21.351086,21.499014,17.393030,9392300
+2002-01-10,21.913216,22.071007,21.301775,21.982248,17.783970,6215800
+2002-01-11,21.548323,22.031559,21.429979,21.568047,17.448874,5683600
+2002-01-14,21.262327,21.499014,20.907297,21.074951,17.049955,6435600
+2002-01-15,21.153847,21.321499,20.887573,21.183432,17.137724,7070700
+2002-01-16,21.183432,21.203156,20.216963,20.216963,16.355835,6979200
+2002-01-17,20.562130,21.045364,20.473373,20.946745,16.946232,6577000
+2002-01-18,20.907297,21.015779,20.710060,20.907297,16.914320,5678100
+2002-01-22,21.104536,21.183432,20.700197,20.818541,16.842514,5558600
+2002-01-23,20.769230,21.203156,20.522682,21.114399,17.081869,7456700
+2002-01-24,21.400394,21.607494,20.512821,20.907297,16.914320,7934100
+2002-01-25,21.153847,21.597633,20.956608,21.479290,17.377077,5597100
+2002-01-28,22.287968,22.287968,21.499014,21.834320,17.664301,5217900
+2002-01-29,21.873768,21.932940,20.729782,21.449703,17.353136,7706500
+2002-01-30,20.956608,21.449703,19.743589,21.104536,17.073893,10947100
+2002-01-31,21.104536,21.942801,20.512821,20.769230,16.802620,10340000
+2002-02-01,22.140039,22.287968,21.548323,22.140039,17.911631,15990600
+2002-02-04,22.149900,22.149900,21.351086,21.380671,17.297285,7702700
+2002-02-05,21.203156,21.676529,21.153847,21.499014,17.393030,6497500
+2002-02-06,21.351086,21.844181,20.907297,21.124260,17.089848,6955700
+2002-02-07,21.331362,22.278107,21.232742,22.238659,17.991413,10493900
+2002-02-08,22.238659,23.264299,22.021696,23.096647,18.685537,10161300
+2002-02-11,22.938856,23.708086,22.790928,23.619329,19.108398,7728300
+2002-02-12,23.175543,23.372782,22.830376,23.076923,18.669582,9807400
+2002-02-13,22.928993,23.668638,22.751480,23.540434,19.044571,7264200
+2002-02-14,23.668638,24.053255,23.619329,23.905325,19.339771,6931500
+2002-02-15,23.856016,23.964497,23.431953,23.570021,19.068504,6693300
+2002-02-19,23.372782,23.560158,22.534517,22.544378,18.238743,7598700
+2002-02-20,22.879684,24.102564,22.731756,23.994083,19.411579,10835800
+2002-02-21,23.767258,24.013807,23.244576,23.402367,18.932871,6381600
+2002-02-22,23.165680,23.915188,22.948717,23.343195,18.884998,4362300
+2002-02-25,23.589743,24.063116,23.500986,23.885601,19.323812,6490000
+2002-02-26,24.132151,24.260355,23.254438,23.560158,19.060524,6650700
+2002-02-27,23.717949,24.161736,23.668638,23.915188,19.347754,7927200
+2002-02-28,23.422091,23.431953,22.544378,22.682446,18.350445,15557100
+2002-03-01,22.633137,23.668638,22.485207,23.658777,19.140306,9416400
+2002-03-04,23.668638,23.668638,22.928993,23.234715,18.797237,9679700
+2002-03-05,23.175543,23.925049,23.116371,23.708086,19.180204,11564400
+2002-03-06,23.668638,23.826429,23.461538,23.816568,19.267965,9079900
+2002-03-07,23.816568,23.816568,23.224852,23.431953,18.956808,7987100
+2002-03-08,23.668638,23.767258,23.037476,23.422091,18.948826,10425100
+2002-03-11,23.274162,23.747534,23.175543,23.589743,19.084459,3670900
+2002-03-12,23.491125,23.639053,23.116371,23.382643,18.916910,5886200
+2002-03-13,23.382643,23.570021,23.027613,23.145956,18.725430,4124500
+2002-03-14,23.067060,23.224852,22.859961,23.175543,18.749363,5967900
+2002-03-15,23.639053,24.063116,23.471401,23.984221,19.403597,11484100
+2002-03-18,24.142012,24.654833,23.856016,24.171598,19.555191,7754500
+2002-03-19,24.575937,24.575937,23.629190,23.964497,19.387644,7501200
+2002-03-20,23.816568,24.043392,23.491125,23.668638,19.148291,5092500
+2002-03-21,23.668638,23.767258,22.859961,23.303747,18.853086,4550400
+2002-03-22,23.323471,23.412230,22.938856,23.126232,18.709471,4041000
+2002-03-25,23.372782,23.372782,22.731756,22.879684,18.510012,4641200
+2002-03-26,22.731756,23.323471,22.337278,22.544378,18.238743,7753500
+2002-03-27,22.455622,22.731756,22.337278,22.573965,18.262682,5333000
+2002-03-28,22.830376,22.958580,22.544378,22.761341,18.414272,6054100
+2002-04-01,22.337278,22.672585,21.794872,22.583826,18.270657,6295500
+2002-04-02,22.485207,22.583826,22.189348,22.445759,18.158958,4509900
+2002-04-03,22.445759,22.475346,21.893492,22.130178,17.903652,5711500
+2002-04-04,22.130178,22.465483,21.942801,22.248520,17.999392,5069400
+2002-04-05,22.396450,23.293886,22.199211,22.919132,18.541924,8134900
+2002-04-08,22.435898,22.859961,22.416174,22.800789,18.446186,5162400
+2002-04-09,22.879684,23.145956,22.702169,22.751480,18.406290,4570000
+2002-04-10,23.126232,23.905325,22.928993,23.717949,19.188181,10196000
+2002-04-11,23.570021,23.757397,22.948717,23.145956,18.725430,7709500
+2002-04-12,23.155819,23.717949,23.076923,23.668638,19.148291,4893100
+2002-04-15,23.668638,23.668638,23.116371,23.313610,18.861065,4156400
+2002-04-16,23.767258,23.806705,23.441814,23.570021,19.068504,5326100
+2002-04-17,23.668638,23.915188,23.471401,23.777121,19.236057,6614700
+2002-04-18,23.796844,24.132151,22.928993,24.063116,19.467426,11075800
+2002-04-19,24.408283,24.822485,24.408283,24.605522,19.906242,16934400
+2002-04-22,24.654833,24.792900,24.142012,24.201183,19.579126,5924800
+2002-04-23,24.349112,24.753452,23.964497,24.161736,19.547213,5511700
+2002-04-24,24.358974,24.654833,24.161736,24.181459,19.563164,6499000
+2002-04-25,23.915188,24.802761,23.353058,24.654833,19.946136,9203500
+2002-04-26,24.161736,24.526627,23.441814,23.767258,19.228071,11020600
+2002-04-29,23.570021,23.658777,22.465483,22.702169,18.366402,6550600
+2002-04-30,22.702169,23.175543,22.435898,22.859961,18.494053,7838000
+2002-05-01,23.106508,23.964497,22.692308,23.836292,19.283920,8374100
+2002-05-02,24.112427,24.358974,23.451677,23.698225,19.172222,5765900
+2002-05-03,23.471401,23.629190,22.781065,23.274162,18.829149,4677000
+2002-05-06,23.047337,23.254438,22.573965,22.573965,18.262682,4017700
+2002-05-07,22.583826,23.007889,22.465483,22.573965,18.262682,5324700
+2002-05-08,22.682446,23.570021,22.633137,23.500986,19.012655,6030600
+2002-05-09,23.274162,23.471401,22.938856,23.224852,18.789261,4737500
+2002-05-10,23.224852,23.224852,22.731756,22.830376,18.470121,5034700
+2002-05-13,23.126232,23.274162,22.938856,23.126232,18.709471,3537600
+2002-05-14,23.668638,24.151875,23.471401,24.063116,19.467426,6100200
+2002-05-15,23.767258,24.635109,23.757397,24.161736,19.547213,6486300
+2002-05-16,24.309664,24.378698,23.954636,24.191322,19.571148,4485800
+2002-05-17,23.964497,24.526627,23.964497,24.506903,19.826456,4387500
+2002-05-20,24.260355,24.309664,23.826429,24.092703,19.491362,3585800
+2002-05-21,24.142012,24.191322,23.214991,23.510849,19.020632,5973300
+2002-05-22,23.422091,23.471401,22.751480,23.126232,18.709471,6119000
+2002-05-23,23.372782,23.500986,22.978304,23.500986,19.012655,6814800
+2002-05-24,23.619329,23.648914,23.037476,23.234715,18.797237,3360000
+2002-05-28,23.471401,23.570021,23.136095,23.214991,18.781279,4681800
+2002-05-29,23.224852,23.323471,22.534517,22.731756,18.390337,6391800
+2002-05-30,22.435898,22.978304,22.287968,22.909269,18.533949,5044500
+2002-05-31,22.682446,23.096647,22.485207,22.593689,18.278639,5919300
+2002-06-03,22.682446,23.037476,21.992111,22.189348,17.951515,7062700
+2002-06-04,21.942801,22.140039,21.686390,21.932940,17.744083,9563300
+2002-06-05,21.932940,22.357002,21.725838,21.814596,17.648336,6859800
+2002-06-06,22.090731,22.090731,21.242603,21.400394,17.313242,5633400
+2002-06-07,20.956608,21.518738,20.808678,21.351086,17.273355,7704900
+2002-06-10,21.449703,21.982248,21.252466,21.676529,17.536642,5433100
+2002-06-11,21.696253,22.061144,21.173571,21.282051,17.217506,6669300
+2002-06-12,21.242603,21.351086,19.723866,20.857988,16.874432,11383000
+2002-06-13,20.710060,20.887573,20.502958,20.749506,16.786667,6982200
+2002-06-14,20.216963,20.907297,19.428007,20.739645,16.778685,6871300
+2002-06-17,20.710060,21.479290,20.562130,21.232742,17.177612,5372700
+2002-06-18,21.449703,21.883629,21.222879,21.646942,17.512705,5548700
+2002-06-19,20.956608,21.153847,20.463511,20.571993,16.643055,8547300
+2002-06-20,20.463511,20.473373,19.220907,19.289940,15.605851,11779200
+2002-06-21,19.033531,19.270218,18.885601,18.984221,15.358523,13335600
+2002-06-24,18.984221,19.230770,18.540434,18.737673,15.159060,14246700
+2002-06-25,19.378698,19.378698,18.915188,18.964497,15.342568,8698800
+2002-06-26,18.392506,18.925049,18.343195,18.757397,15.175019,9998700
+2002-06-27,19.132151,19.132151,18.195267,18.786982,15.198954,12785400
+2002-06-28,18.540434,18.717949,18.234715,18.639053,15.079278,14434400
+2002-07-01,18.639053,18.826429,17.988165,18.106508,14.648442,7890300
+2002-07-02,17.919132,18.441814,17.357002,18.244576,14.760139,11429700
+2002-07-03,18.244576,18.747534,18.116371,18.688362,15.119169,6263400
+2002-07-05,18.836292,19.723866,18.786982,19.516766,15.789358,3884800
+2002-07-08,19.329388,19.516766,18.589743,18.737673,15.159060,6652100
+2002-07-09,18.737673,19.102564,18.431953,18.500986,14.967582,6659200
+2002-07-10,18.589743,18.836292,17.879684,18.037476,14.592593,6415200
+2002-07-11,17.751480,18.441814,17.564102,18.441814,14.919706,8006300
+2002-07-12,18.047337,18.668638,17.751480,18.195267,14.720246,6089700
+2002-07-15,17.800789,18.007889,17.041420,17.731756,14.345259,9409700
+2002-07-16,17.011835,17.879684,16.962524,17.406311,14.081968,9290900
+2002-07-17,17.800789,18.007889,17.455622,17.889545,14.472916,7673100
+2002-07-18,17.751480,17.899408,17.307692,17.416174,14.089951,5594000
+2002-07-19,16.785009,16.962524,16.143984,16.410257,13.276147,10938300
+2002-07-22,16.321499,16.518738,15.443787,15.680473,12.685741,13556000
+2002-07-23,15.729783,16.074951,15.118343,15.414201,12.470322,14087200
+2002-07-24,14.940828,16.380671,14.694280,16.193295,13.100622,13198700
+2002-07-25,16.203156,16.765285,15.285996,16.301775,13.188386,10292300
+2002-07-26,16.074951,16.597633,15.710059,15.779093,12.765527,7686200
+2002-07-29,16.272190,17.209072,16.094675,17.110455,13.842616,10305800
+2002-07-30,17.110455,17.495070,16.420118,17.199211,13.914421,8974800
+2002-07-31,17.357002,17.485207,16.765285,17.485207,14.145798,7821700
+2002-08-01,17.199211,17.307692,16.568047,16.597633,13.427737,8659500
+2002-08-02,14.950690,15.098619,14.694280,15.098619,12.215012,30997700
+2002-08-05,14.852071,14.940828,13.658777,14.072978,11.385251,15896400
+2002-08-06,14.497042,14.763313,14.112426,14.181459,11.473016,12416900
+2002-08-07,14.349113,14.497042,13.560158,13.708087,11.090052,14054600
+2002-08-08,13.757397,14.644970,13.293885,14.299803,11.568758,19423500
+2002-08-09,14.151874,14.447732,13.806706,14.447732,11.688435,10842300
+2002-08-12,14.398422,14.408284,13.757397,14.033530,11.353337,9581900
+2002-08-13,13.806706,13.905326,13.560158,13.579882,10.986330,14147600
+2002-08-14,13.609468,14.378698,13.333333,14.299803,11.568758,16338300
+2002-08-15,14.299803,15.019724,14.280079,14.940828,12.087357,15129600
+2002-08-16,15.187377,15.996056,14.871795,15.641026,12.653828,14682700
+2002-08-19,15.976332,16.607494,15.779093,16.420118,13.284125,13072600
+2002-08-20,16.420118,16.508875,15.996056,16.222879,13.124554,7778700
+2002-08-21,16.370810,16.696253,16.232742,16.666666,13.483589,8489900
+2002-08-22,16.686390,17.209072,16.568047,17.130178,13.858575,7198700
+2002-08-23,16.814596,16.952663,16.025640,16.597633,13.427737,7133300
+2002-08-26,16.597633,16.646942,15.986194,16.439842,13.300083,7194400
+2002-08-27,16.479290,16.617357,15.710059,15.779093,12.765527,9182000
+2002-08-28,15.729783,15.729783,15.039448,15.187377,12.286819,7680000
+2002-08-29,14.891519,15.749507,14.654832,15.552268,12.582022,8820200
+2002-08-30,15.581854,15.729783,15.305720,15.463511,12.510216,4174400
+2002-09-03,15.364891,15.364891,14.467456,14.526628,11.752261,9285100
+2002-09-04,14.773175,15.019724,14.526628,14.832347,11.999597,6635100
+2002-09-05,14.832347,14.832347,14.339251,14.556213,11.776198,5575600
+2002-09-06,14.763313,15.305720,14.704142,15.147929,12.254904,6243800
+2002-09-09,15.118343,15.976332,14.842209,15.680473,12.685741,7242900
+2002-09-10,15.680473,16.094675,15.562130,16.025640,12.964988,6859100
+2002-09-11,16.173571,16.410257,15.877712,15.956608,12.909138,3751900
+2002-09-12,15.877712,15.877712,15.157791,15.285996,12.366603,5732800
+2002-09-13,15.285996,15.542406,15.019724,15.285996,12.366603,6625300
+2002-09-16,15.285996,15.690335,15.118343,15.601578,12.621914,6131000
+2002-09-17,15.897436,16.025640,15.532544,15.759369,12.749569,7345000
+2002-09-18,15.443787,15.779093,15.147929,15.522682,12.558086,8556000
+2002-09-19,15.039448,15.276134,14.605523,14.605523,11.816092,6635200
+2002-09-20,14.743589,15.078896,14.644970,15.029586,12.159163,10051200
+2002-09-23,14.694280,14.940828,14.497042,14.861933,12.023530,7285200
+2002-09-24,14.447732,14.990138,14.388560,14.546351,11.768219,12970400
+2002-09-25,14.891519,15.285996,14.506904,15.216963,12.310754,8047700
+2002-09-26,15.256411,15.759369,15.029586,15.680473,12.685741,8752100
+2002-09-27,15.532544,15.680473,14.940828,15.029586,12.159163,11021200
+2002-09-30,14.842209,15.177515,14.398422,14.930966,12.079378,11424200
+2002-10-01,15.039448,15.581854,14.812623,15.483234,12.526171,11159100
+2002-10-02,15.483234,16.469427,15.315582,16.025640,12.964988,15248300
+2002-10-03,16.321499,16.420118,15.591716,15.779093,12.765527,13386600
+2002-10-04,15.828403,15.976332,14.556213,14.842209,12.007573,14944100
+2002-10-07,14.842209,15.187377,13.905326,13.944773,11.281532,12641000
+2002-10-08,13.954635,14.743589,13.708087,14.447732,11.688435,11850900
+2002-10-09,14.122288,14.487180,13.806706,14.003944,11.329405,7984000
+2002-10-10,14.102564,15.216963,13.796844,15.009862,12.143206,9245600
+2002-10-11,15.631164,15.927022,15.295858,15.779093,12.765527,8962800
+2002-10-14,15.138067,16.005917,15.128205,15.857988,12.829353,8013700
+2002-10-15,16.173571,16.518738,15.887574,16.518738,13.363911,11613900
+2002-10-16,16.173571,16.439842,15.838264,16.173571,13.084664,8259600
+2002-10-17,16.518738,16.666666,16.272190,16.518738,13.363911,7910400
+2002-10-18,16.025640,16.538462,15.690335,16.518738,13.363911,8335400
+2002-10-21,16.518738,17.159763,16.291914,17.140039,13.866552,8860500
+2002-10-22,16.962524,17.248520,16.410257,16.873768,13.651135,8432600
+2002-10-23,16.765285,17.702169,16.696253,17.554241,14.201646,12751400
+2002-10-24,17.702169,17.859961,16.893492,17.159763,13.882509,12054600
+2002-10-25,17.110455,17.781065,16.913216,17.731756,14.345259,7022000
+2002-10-28,17.751480,17.899408,16.962524,17.140039,13.866552,8039100
+2002-10-29,16.893492,17.169624,16.291914,16.518738,13.363911,8380000
+2002-10-30,16.518738,16.844181,16.420118,16.597633,13.427737,6103900
+2002-10-31,16.508875,16.597633,16.291914,16.469427,13.324016,7434900
+2002-11-01,16.518738,16.863905,16.301775,16.794872,13.587306,6313500
+2002-11-04,17.899408,18.333334,17.593689,17.800789,14.401107,12667000
+2002-11-05,17.948717,18.491125,17.879684,18.372782,14.863857,8017300
+2002-11-06,18.441814,18.727810,17.840237,18.599606,15.047368,10423100
+2002-11-07,18.441814,18.589743,17.702169,18.007889,14.568657,9331300
+2002-11-08,16.824457,17.741617,16.824457,17.435898,14.105908,10705800
+2002-11-11,17.357002,17.455622,16.863905,17.130178,13.858575,4549400
+2002-11-12,17.130178,17.514793,16.992111,17.307692,14.002186,5591300
+2002-11-13,17.297831,17.751480,16.824457,17.652861,14.281430,6784200
+2002-11-14,17.928993,18.037476,17.692308,17.869822,14.456959,5740600
+2002-11-15,17.998028,18.431953,17.702169,18.274162,14.784075,5955100
+2002-11-18,18.392506,18.629190,18.017752,18.057199,14.608551,5335800
+2002-11-19,17.948717,18.205128,17.751480,17.928993,14.504830,4766400
+2002-11-20,17.751480,18.372782,17.682446,18.293886,14.800031,5153100
+2002-11-21,18.293886,19.378698,18.195267,19.280079,15.597876,9092400
+2002-11-22,19.280079,19.526627,18.688362,19.260355,15.581921,8552500
+2002-11-25,19.280079,19.822485,19.161736,19.368835,15.669684,7331100
+2002-11-26,18.786982,19.151875,18.609467,18.747534,15.167040,6913000
+2002-11-27,19.181459,19.921104,19.053255,19.615385,15.869142,7176300
+2002-11-29,19.477318,19.871796,19.477318,19.546350,15.813294,3943000
+2002-12-02,19.625246,19.960552,18.885601,19.122288,15.470226,8462000
+2002-12-03,18.737673,18.826429,18.195267,18.284023,14.792053,7302700
+2002-12-04,17.199211,17.652861,16.617357,17.435898,14.105908,17370800
+2002-12-05,17.485207,17.554241,17.061144,17.396450,14.073993,6467500
+2002-12-06,17.011835,17.199211,16.617357,16.932940,13.699008,9733000
+2002-12-09,16.893492,16.992111,16.538462,16.617357,13.443695,6270100
+2002-12-10,16.420118,16.942801,16.272190,16.804733,13.595285,9685700
+2002-12-11,16.420118,16.913216,16.390533,16.627218,13.619519,5459000
+2002-12-12,16.489151,16.814596,16.311638,16.637081,13.627596,5728900
+2002-12-13,16.291914,16.518738,16.173571,16.291914,13.344866,6081000
+2002-12-16,16.420118,16.814596,16.321499,16.646942,13.635675,5968400
+2002-12-17,16.617357,17.001972,16.528599,16.794872,13.756845,5403400
+2002-12-18,16.804733,16.804733,16.173571,16.301775,13.352943,7357800
+2002-12-19,16.193295,16.420118,15.769231,15.976332,13.086370,7233600
+2002-12-20,16.321499,16.469427,15.986194,16.232742,13.296399,7648600
+2002-12-23,16.272190,16.735701,16.242603,16.400394,13.433723,5791500
+2002-12-24,16.074951,16.410257,16.074951,16.183432,13.256010,2395300
+2002-12-26,16.222879,16.715977,16.222879,16.331362,13.377177,4175800
+2002-12-27,16.173571,16.439842,15.719921,15.788955,12.932887,5563600
+2002-12-30,15.443787,15.838264,15.285996,15.818541,12.957121,7653400
+2002-12-31,15.729783,16.222879,15.601578,16.084812,13.175229,6076200
+2003-01-02,16.568047,17.140039,16.351086,17.021696,13.942639,10540300
+2003-01-03,17.021696,17.209072,16.844181,17.120316,14.023418,4638100
+2003-01-06,17.228796,18.037476,17.209072,17.998028,14.742362,7955100
+2003-01-07,17.948717,18.116371,17.751480,17.800789,14.580803,6153000
+2003-01-08,17.810652,17.810652,17.327415,17.435898,14.281914,6417400
+2003-01-09,17.554241,18.047337,17.514793,17.830376,14.605034,7357400
+2003-01-10,17.830376,18.205128,17.702169,17.899408,14.661584,6772200
+2003-01-13,18.145956,18.284023,17.899408,18.047337,14.782749,5745000
+2003-01-14,17.899408,18.254438,17.859961,18.214991,14.920079,5471800
+2003-01-15,18.047337,18.195267,17.534517,17.741617,14.532334,7520600
+2003-01-16,17.948717,18.185404,17.652861,17.800789,14.580803,6180900
+2003-01-17,17.800789,18.027613,17.465483,17.554241,14.378853,6720900
+2003-01-21,17.751480,17.840237,16.883629,16.913216,13.853784,6459100
+2003-01-22,16.913216,17.396450,16.804733,17.080868,13.991105,6219100
+2003-01-23,17.080868,17.554241,16.923077,17.435898,14.281914,4476100
+2003-01-24,17.357002,17.357002,16.439842,16.863905,13.813388,8230400
+2003-01-27,16.666666,16.755424,16.252466,16.518738,13.530661,8108500
+2003-01-28,16.568047,16.745562,16.351086,16.617357,13.611443,6470600
+2003-01-29,16.627218,17.090731,16.124260,16.824457,13.781079,6992300
+2003-01-30,16.824457,16.824457,16.025640,16.124260,13.207538,7053600
+2003-01-31,16.568047,17.435898,16.528599,17.258383,14.136513,12433300
+2003-02-03,17.416174,17.554241,17.071007,17.209072,14.096121,6314500
+2003-02-04,16.893492,16.952663,16.508875,16.666666,13.651830,5600200
+2003-02-05,16.814596,17.051283,16.380671,16.666666,13.651830,5882100
+2003-02-06,16.666666,16.863905,16.370810,16.765285,13.732608,5120400
+2003-02-07,16.469427,16.745562,16.055227,16.124260,13.207538,6869600
+2003-02-10,16.104536,16.311638,15.857988,16.232742,13.296399,6215200
+2003-02-11,16.272190,16.568047,16.055227,16.252466,13.312556,6271900
+2003-02-12,16.045364,16.252466,15.719921,16.074951,13.167151,6507000
+2003-02-13,15.927022,16.084812,15.660749,15.907298,13.029825,7129300
+2003-02-14,15.729783,16.499014,15.719921,16.499014,13.514506,5404500
+2003-02-18,16.617357,17.061144,16.538462,17.061144,13.974949,5773700
+2003-02-19,17.011835,17.011835,16.400394,16.617357,13.611443,4337300
+2003-02-20,16.538462,16.785009,16.420118,16.617357,13.611443,4932100
+2003-02-21,16.755424,17.061144,16.301775,16.785009,13.748765,6577100
+2003-02-24,16.646942,16.706114,16.321499,16.380671,13.417568,5080600
+2003-02-25,16.074951,16.607494,15.759369,16.528599,13.538737,7554300
+2003-02-26,16.370810,16.577909,16.213018,16.439842,13.466037,5611900
+2003-02-27,16.469427,16.696253,16.134123,16.272190,13.328712,8512600
+2003-02-28,16.518738,16.863905,16.400394,16.824457,13.781079,11211300
+2003-03-03,16.863905,16.952663,16.469427,16.568047,13.571051,5824900
+2003-03-04,16.568047,16.568047,15.779093,15.828403,12.965199,10953500
+2003-03-05,15.581854,15.857988,15.483234,15.719921,12.876344,10250200
+2003-03-06,15.571992,15.670611,15.295858,15.443787,12.650160,7595500
+2003-03-07,15.197239,15.532544,15.088758,15.502958,12.698626,9776100
+2003-03-10,15.285996,15.364891,14.891519,14.970414,12.262415,6012300
+2003-03-11,15.108481,15.207101,14.704142,14.812623,12.133163,7513600
+2003-03-12,14.802761,15.029586,14.635109,14.970414,12.262415,6850300
+2003-03-13,15.187377,16.124260,15.108481,15.976332,13.086370,9530800
+2003-03-14,16.055227,16.291914,15.828403,16.193295,13.264088,8425900
+2003-03-17,15.966470,16.863905,15.808679,16.775148,13.740688,9678500
+2003-03-18,16.775148,16.942801,16.183432,16.351086,13.393334,8204200
+2003-03-19,16.420118,16.765285,16.173571,16.735701,13.708374,9078200
+2003-03-20,16.341223,16.982248,16.025640,16.903353,13.845702,11864700
+2003-03-21,17.189348,18.481262,17.051283,18.481262,15.138185,16750300
+2003-03-24,17.662722,17.761341,17.080868,17.159763,14.055728,13098400
+2003-03-25,17.248520,17.741617,16.715977,17.445759,14.289991,8783400
+2003-03-26,17.603550,17.662722,17.199211,17.386587,14.241527,9534100
+2003-03-27,17.110455,17.524654,16.863905,17.337278,14.201134,9656700
+2003-03-28,17.337278,17.623274,17.130178,17.445759,14.289991,7976100
+2003-03-31,17.061144,17.061144,16.656805,16.785009,13.748765,7780700
+2003-04-01,16.785009,17.011835,16.715977,16.873768,13.821470,8342300
+2003-04-02,17.120316,17.800789,17.120316,17.731756,14.524256,9327500
+2003-04-03,17.751480,17.820513,17.376726,17.406311,14.257679,7762000
+2003-04-04,17.357002,17.514793,16.863905,17.199211,14.088045,8713200
+2003-04-07,17.751480,18.047337,17.327415,17.495070,14.330383,6929800
+2003-04-08,17.495070,17.514793,16.686390,16.893492,13.837624,24307900
+2003-04-09,16.962524,17.140039,16.834320,16.863905,13.813388,21113300
+2003-04-10,16.962524,17.120316,16.765285,17.100592,14.007263,7290300
+2003-04-11,17.258383,17.721893,17.258383,17.593689,14.411165,12931800
+2003-04-14,17.357002,17.998028,17.327415,17.948717,14.701970,9730500
+2003-04-15,17.800789,18.076923,17.652861,18.057199,14.790829,8184100
+2003-04-16,18.057199,18.136095,17.672585,17.741617,14.532334,8690400
+2003-04-17,17.741617,18.155819,17.702169,18.145956,14.863532,5778000
+2003-04-21,18.145956,18.293886,17.889545,18.195267,14.903923,6380600
+2003-04-22,17.998028,18.737673,17.919132,18.609467,15.243196,9820100
+2003-04-23,18.560158,19.013807,18.500986,18.974360,15.542086,9214000
+2003-04-24,18.688362,18.727810,18.195267,18.392506,15.065482,6333600
+2003-04-25,18.441814,18.471401,17.751480,17.909269,14.669659,7082000
+2003-04-28,18.017752,18.284023,17.978304,18.195267,14.903923,7197700
+2003-04-29,18.284023,18.540434,17.938856,18.076923,14.806982,7339700
+2003-04-30,18.145956,18.441814,17.919132,18.402367,15.073559,8600800
+2003-05-01,18.441814,18.589743,18.293886,18.461538,15.122027,7920100
+2003-05-02,18.461538,18.727810,17.455622,18.629190,15.259351,11946500
+2003-05-05,18.629190,18.629190,18.165680,18.234715,14.936234,7732700
+2003-05-06,18.234715,18.431953,18.067060,18.205128,14.911997,9112100
+2003-05-07,18.205128,18.323471,17.978304,18.106508,14.831218,6551400
+2003-05-08,18.007889,18.145956,17.800789,17.919132,14.677737,7186400
+2003-05-09,17.998028,18.441814,17.869822,18.402367,15.073559,7046400
+2003-05-12,18.402367,18.816568,18.086784,18.777121,15.380524,7226800
+2003-05-13,18.481262,18.540434,18.195267,18.244576,14.944312,8584300
+2003-05-14,18.500986,18.500986,17.899408,18.047337,14.782749,9390000
+2003-05-15,18.126232,18.234715,17.988165,18.205128,14.911997,6896100
+2003-05-16,17.978304,18.244576,17.978304,18.185404,14.895844,7270200
+2003-05-19,17.998028,17.998028,17.524654,17.573965,14.395007,7529900
+2003-05-20,17.652861,17.850100,17.366863,17.603550,14.419240,8137600
+2003-05-21,18.047337,18.096647,17.209072,17.633137,14.443476,8926200
+2003-05-22,17.702169,18.126232,17.672585,17.869822,14.637347,10428900
+2003-05-23,17.998028,18.096647,17.948717,17.988165,14.734284,6128400
+2003-05-27,17.909269,18.678501,17.899408,18.609467,15.243196,11730200
+2003-05-28,18.737673,19.250494,18.639053,19.171598,15.703644,14968100
+2003-05-29,19.575937,19.575937,18.915188,19.013807,15.574395,12745000
+2003-05-30,19.082840,19.546350,19.033531,19.378698,15.873283,8101200
+2003-06-02,19.625246,19.901381,19.546350,19.792900,16.212559,15869900
+2003-06-03,19.723866,19.812624,19.388559,19.487179,15.962141,8667200
+2003-06-04,19.723866,20.591717,19.723866,20.443787,16.745705,14958500
+2003-06-05,20.216963,20.798817,20.128204,20.650888,16.915346,10852100
+2003-06-06,20.759369,21.252466,20.759369,20.986193,17.189997,14818300
+2003-06-09,20.956608,20.986193,19.822485,20.039448,16.414511,11448000
+2003-06-10,20.285995,20.424063,20.167652,20.335306,16.656847,6303700
+2003-06-11,20.562130,20.562130,19.575937,20.512821,16.802254,6577900
+2003-06-12,20.463511,20.611441,20.019724,20.512821,16.802254,8599000
+2003-06-13,20.660749,20.670612,19.861933,20.069033,16.438745,7051300
+2003-06-16,20.059172,20.493097,20.000000,20.384615,16.697237,7314000
+2003-06-17,20.690336,20.936884,20.384615,20.848125,17.076906,9029700
+2003-06-18,20.710060,20.749506,20.177515,20.325443,16.648769,9644000
+2003-06-19,20.394478,20.404339,19.743589,19.812624,16.228714,8318400
+2003-06-20,20.108480,20.246548,19.891520,20.049309,16.422583,10065200
+2003-06-23,19.901381,20.049309,19.368835,19.585798,16.042917,8798500
+2003-06-24,19.585798,19.654833,19.220907,19.358974,15.857123,7902900
+2003-06-25,19.437870,19.477318,18.737673,18.846153,15.437069,13576700
+2003-06-26,18.875740,19.201183,18.589743,19.132151,15.671330,8250300
+2003-06-27,19.280079,19.408283,18.964497,19.072979,15.622865,6628500
+2003-06-30,19.723866,19.891520,19.270218,19.477318,15.954062,14836000
+2003-07-01,19.487179,19.664694,19.132151,19.615385,16.067154,8454300
+2003-07-02,19.723866,20.000000,19.585798,19.930967,16.325649,9348800
+2003-07-03,19.684418,20.285995,19.654833,19.792900,16.212559,5198400
+2003-07-07,19.970415,20.414202,19.921104,20.295858,16.624538,6854000
+2003-07-08,19.990139,20.779093,19.990139,20.315582,16.640694,12730800
+2003-07-09,20.187376,20.838264,20.157791,20.631165,16.899189,9988900
+2003-07-10,20.325443,20.483234,20.128204,20.276134,16.608383,7512100
+2003-07-11,20.276134,20.591717,20.276134,20.552269,16.834570,5256400
+2003-07-14,20.660749,21.252466,20.660749,21.015779,17.214231,10430400
+2003-07-15,21.301775,21.321499,20.887573,21.055227,17.246542,10921500
+2003-07-16,21.143984,21.153847,20.690336,20.759369,17.004204,6860200
+2003-07-17,20.759369,20.857988,20.424063,20.532545,16.818411,5904400
+2003-07-18,20.680473,20.759369,20.453650,20.719921,16.971891,5586800
+2003-07-21,20.512821,20.552269,19.980276,20.128204,16.487213,6989500
+2003-07-22,20.216963,20.749506,20.069033,20.532545,16.818411,7362300
+2003-07-23,20.542406,20.631165,20.019724,20.325443,16.648769,5913900
+2003-07-24,20.562130,20.946745,20.374754,20.562130,16.842644,7502400
+2003-07-25,20.680473,21.035503,20.522682,20.996056,17.198076,7741800
+2003-07-28,21.400394,21.863905,21.331362,21.607494,17.698910,13588300
+2003-07-29,21.686390,21.923077,21.518738,21.745562,17.812008,8503200
+2003-07-30,21.745562,21.804733,21.331362,21.390533,17.521200,7955100
+2003-07-31,21.696253,21.804733,21.400394,21.617357,17.706989,8680500
+2003-08-01,22.071007,23.471401,22.071007,22.209072,18.191675,15776200
+2003-08-04,22.080868,22.238659,21.528599,21.804733,17.860468,8146700
+2003-08-05,21.794872,21.794872,21.213018,21.252466,17.408102,6804100
+2003-08-06,21.252466,21.449703,21.025640,21.114399,17.295010,7038800
+2003-08-07,21.213018,21.499014,21.055227,21.351086,17.488888,7012700
+2003-08-08,21.420118,21.548323,21.272190,21.538462,17.642366,5928600
+2003-08-11,21.538462,21.785009,21.499014,21.745562,17.812008,6717200
+2003-08-12,21.765285,22.179487,21.696253,22.140039,18.135126,6962000
+2003-08-13,22.179487,22.228796,21.893492,22.189348,18.175510,5717800
+2003-08-14,22.189348,22.376726,21.873768,22.248520,18.223980,6426200
+2003-08-15,22.317554,22.317554,22.041420,22.140039,18.135126,3036100
+2003-08-18,22.169624,22.218935,22.061144,22.090731,18.094736,7270400
+2003-08-19,22.090731,22.140039,21.646942,21.923077,17.957407,8055600
+2003-08-20,21.499014,21.785009,21.459566,21.676529,17.755459,7177500
+2003-08-21,21.666666,21.706114,21.143984,21.301775,17.448494,13798600
+2003-08-22,21.449703,21.499014,21.015779,21.114399,17.295010,9765000
+2003-08-25,20.907297,21.035503,20.808678,20.956608,17.165764,6174900
+2003-08-26,20.838264,20.946745,20.562130,20.562130,16.842644,11142300
+2003-08-27,20.571993,20.650888,20.355030,20.433926,16.737627,10702000
+2003-08-28,20.512821,20.542406,19.871796,20.019724,16.398354,16050600
+2003-08-29,20.039448,20.414202,19.940828,20.216963,16.559910,9275900
+2003-09-02,20.443787,20.749506,20.276134,20.660749,16.923424,12163400
+2003-09-03,20.907297,21.173571,20.542406,21.025640,17.222307,11595000
+2003-09-04,20.887573,21.193295,20.877712,21.163708,17.335398,8395400
+2003-09-05,21.134123,21.242603,20.838264,20.897436,17.117294,7582300
+2003-09-08,20.828402,21.104536,20.690336,21.065088,17.254622,6993000
+2003-09-09,20.660749,20.917160,20.641026,20.887573,17.109217,6561000
+2003-09-10,20.581854,20.650888,20.177515,20.315582,16.640694,9735000
+2003-09-11,20.276134,20.502958,20.197239,20.394478,16.705318,6547600
+2003-09-12,20.147928,20.157791,19.792900,19.960552,16.349884,9372800
+2003-09-15,19.723866,20.167652,19.723866,19.822485,16.236797,11569500
+2003-09-16,19.921104,20.216963,19.773176,20.157791,16.511446,11424700
+2003-09-17,20.078896,20.266272,19.921104,19.990139,16.374117,7861900
+2003-09-18,20.019724,20.167652,19.812624,20.167652,16.519522,7966800
+2003-09-19,20.069033,20.216963,19.930967,20.197239,16.543760,8939800
+2003-09-22,19.871796,19.950691,19.595661,19.733728,16.164093,10303600
+2003-09-23,19.842209,20.019724,19.743589,19.921104,16.317574,8874500
+2003-09-24,19.852072,19.881657,19.506903,19.536489,16.002529,9745800
+2003-09-25,20.039448,20.364891,19.871796,20.029585,16.406429,14081600
+2003-09-26,20.049309,20.118343,19.694281,19.802761,16.220638,7970300
+2003-09-29,19.743589,20.019724,19.566074,20.000000,16.382202,7071000
+2003-09-30,19.871796,20.197239,19.585798,19.891520,16.293333,12524600
+2003-10-01,20.138067,20.562130,20.078896,20.542406,16.826485,11397200
+2003-10-02,20.542406,20.710060,20.374754,20.650888,16.915346,7593900
+2003-10-03,21.005917,21.351086,20.956608,20.986193,17.189997,10508300
+2003-10-06,20.956608,21.104536,20.719921,20.897436,17.117294,4999600
+2003-10-07,20.779093,21.291914,20.719921,21.262327,17.416185,8215900
+2003-10-08,21.331362,21.380671,21.015779,21.163708,17.335398,5640400
+2003-10-09,21.646942,21.785009,21.252466,21.577909,17.674675,10140600
+2003-10-10,21.301775,21.499014,21.232742,21.360947,17.496958,5991800
+2003-10-13,21.449703,21.577909,21.084812,21.203156,17.367710,7426800
+2003-10-14,21.163708,21.755424,21.114399,21.646942,17.731220,7374900
+2003-10-15,21.646942,21.696253,21.351086,21.449703,17.569662,6544400
+2003-10-16,21.351086,21.637081,21.351086,21.597633,17.690834,4461300
+2003-10-17,21.676529,21.686390,21.232742,21.282051,17.432337,5576100
+2003-10-20,21.400394,21.410257,21.084812,21.252466,17.408102,4935100
+2003-10-21,21.400394,21.597633,21.213018,21.489151,17.601974,6412100
+2003-10-22,21.400394,21.577909,21.222879,21.321499,17.464647,5750700
+2003-10-23,21.321499,21.548323,21.222879,21.548323,17.650448,4657000
+2003-10-24,21.548323,21.548323,21.213018,21.499014,17.610058,4700100
+2003-10-27,21.538462,22.021696,21.518738,21.834320,17.884706,7806600
+2003-10-28,21.992111,22.287968,21.893492,22.228796,18.207823,9315300
+2003-10-29,22.228796,22.524654,22.021696,22.455622,18.393621,6551900
+2003-10-30,22.534517,22.633137,22.090731,22.564102,18.482481,6245800
+2003-10-31,22.534517,22.564102,22.090731,22.327415,18.288610,5983700
+2003-11-03,22.534517,22.672585,22.386587,22.603550,18.514788,6613600
+2003-11-04,22.485207,22.682446,22.278107,22.435898,18.377462,6048600
+2003-11-05,22.435898,22.751480,22.287968,22.702169,18.595570,5679800
+2003-11-06,22.583826,23.116371,22.189348,23.076923,18.902536,7444400
+2003-11-07,23.126232,23.431953,22.968441,23.076923,18.902536,7920300
+2003-11-10,22.978304,23.086784,22.781065,22.830376,18.700586,4090300
+2003-11-11,22.731756,22.899408,22.692308,22.800789,18.676352,3680500
+2003-11-12,22.800789,23.057199,22.712032,23.017752,18.854067,6575000
+2003-11-13,23.076923,23.106508,22.909269,22.998028,18.837912,6273500
+2003-11-14,22.948717,23.057199,22.603550,22.751480,18.635963,4851600
+2003-11-17,22.495070,22.830376,22.337278,22.712032,18.603645,7608500
+2003-11-18,22.830376,22.830376,22.179487,22.209072,18.191675,8183000
+2003-11-19,22.435898,22.564102,22.278107,22.475346,18.409773,5801900
+2003-11-20,22.258383,22.761341,22.258383,22.366863,18.320921,8095600
+2003-11-21,22.426035,22.465483,22.071007,22.268244,18.240133,10382300
+2003-11-24,22.307692,22.534517,22.238659,22.514793,18.442085,7257900
+2003-11-25,22.485207,22.919132,22.406311,22.672585,18.571341,5700400
+2003-11-26,22.879684,22.879684,22.583826,22.761341,18.644039,4758200
+2003-11-28,22.751480,22.820513,22.692308,22.771204,18.652119,1977800
+2003-12-01,22.771204,23.017752,22.642998,22.850100,18.716740,12501200
+2003-12-02,22.534517,22.731756,22.130178,22.268244,18.240133,14356800
+2003-12-03,22.248520,22.287968,21.489151,21.548323,17.650448,17247600
+2003-12-04,21.706114,21.893492,21.597633,21.893492,17.933174,10834100
+2003-12-05,21.696253,21.725838,21.262327,21.370810,17.505041,9607900
+2003-12-08,21.301775,21.489151,21.301775,21.469427,17.585821,6728900
+2003-12-09,21.676529,22.041420,21.617357,21.992111,18.013950,13543300
+2003-12-10,21.844181,22.396450,21.725838,22.100592,18.274906,13189400
+2003-12-11,22.071007,22.396450,21.962524,22.337278,18.470621,7584900
+2003-12-12,22.327415,22.593689,22.258383,22.504930,18.609255,6289900
+2003-12-15,22.662722,22.761341,22.347139,22.514793,18.617409,7827000
+2003-12-16,22.573965,22.731756,22.534517,22.633137,18.715269,5940400
+2003-12-17,22.564102,22.623274,22.376726,22.623274,18.707109,4614400
+2003-12-18,22.623274,22.859961,22.554241,22.820513,18.870209,4436100
+2003-12-19,22.366863,22.938856,22.366863,22.909269,18.943600,9117100
+2003-12-22,22.751480,23.126232,22.633137,23.126232,19.123007,7838600
+2003-12-23,23.027613,23.165680,22.938856,23.136095,19.131163,5655800
+2003-12-24,23.007889,23.126232,22.938856,23.047337,19.057766,4252500
+2003-12-26,23.047337,23.136095,22.830376,22.889545,18.927288,2651500
+2003-12-29,22.840237,23.175543,22.840237,23.155819,19.147470,4673900
+2003-12-30,23.106508,23.234715,22.938856,22.978304,19.000683,4092000
+2003-12-31,22.988165,23.126232,22.869822,23.007889,19.025145,4742600
+2004-01-02,23.165680,23.589743,23.136095,23.343195,19.302412,6758100
+2004-01-05,23.343195,23.865877,23.284023,23.806705,19.685686,10511900
+2004-01-06,23.668638,23.934912,23.668638,23.865877,19.734617,9590900
+2004-01-07,24.142012,24.299803,23.925049,24.260355,20.060806,9883800
+2004-01-08,24.260355,24.635109,24.211046,24.615385,20.354380,9706100
+2004-01-09,24.408283,24.644970,24.368835,24.526627,20.280985,7514500
+2004-01-12,24.437870,24.605522,24.260355,24.536489,20.289146,6526000
+2004-01-13,24.467455,24.644970,24.260355,24.339251,20.126051,11996500
+2004-01-14,24.536489,24.684418,24.349112,24.556213,20.305450,13038900
+2004-01-15,24.585798,24.733728,24.368835,24.477318,20.240213,9033100
+2004-01-16,24.585798,24.595661,24.428007,24.506903,20.264677,8135900
+2004-01-20,24.457594,24.497042,23.974360,24.132151,19.954796,9003100
+2004-01-21,23.974360,24.260355,23.925049,24.240631,20.044497,7286100
+2004-01-22,24.240631,24.447731,24.142012,24.358974,20.142357,4895700
+2004-01-23,24.358974,24.497042,23.422091,23.717949,19.612291,10104800
+2004-01-26,23.589743,24.102564,23.589743,24.063116,19.897711,6056500
+2004-01-27,24.092703,24.142012,23.737673,23.826429,19.702003,4784500
+2004-01-28,23.737673,23.796844,23.185404,23.343195,19.302412,7408700
+2004-01-29,23.471401,24.309664,22.662722,24.112427,19.938486,15634000
+2004-01-30,23.392506,23.846153,23.372782,23.668638,19.571518,17271400
+2004-02-02,23.471401,23.698225,23.274162,23.471401,19.408426,9371600
+2004-02-03,23.106508,23.550297,22.781065,22.938856,18.968063,10144600
+2004-02-04,22.741617,23.392506,22.731756,22.869822,18.910978,9088900
+2004-02-05,22.978304,23.195267,22.820513,22.879684,18.919134,7447700
+2004-02-06,22.781065,23.205128,22.583826,23.027613,19.041458,10142400
+2004-02-09,23.027613,23.648914,22.958580,23.441814,19.383959,10455900
+2004-02-10,23.520710,23.964497,23.422091,23.747534,19.636759,7389000
+2004-02-11,27.534517,27.613413,26.893492,27.218935,22.507248,116625000
+2004-02-12,27.564102,28.017752,27.228796,27.613413,22.833439,58674700
+2004-02-13,27.218935,27.366863,26.479290,26.548323,21.952721,43292800
+2004-02-17,27.021696,27.130178,26.124260,26.528599,21.936413,28671300
+2004-02-18,26.429979,26.489151,26.193295,26.341223,21.781475,11734900
+2004-02-19,26.627218,26.676529,26.331362,26.627218,22.017956,9974700
+2004-02-20,26.617357,26.617357,26.025640,26.183432,21.650995,13152600
+2004-02-23,26.183432,26.380671,25.660749,26.380671,21.814091,13661500
+2004-02-24,26.025640,26.065088,25.443787,25.601578,21.169859,15827200
+2004-02-25,25.641026,26.025640,25.552269,25.936884,21.447124,12070700
+2004-02-26,26.183432,26.607494,26.065088,26.360947,21.797779,11434000
+2004-02-27,26.508875,26.508875,26.153847,26.163708,21.634686,9965100
+2004-03-01,26.429979,26.666666,26.272190,26.499014,21.911943,10605800
+2004-03-02,26.331362,26.676529,26.331362,26.390533,21.822248,8531000
+2004-03-03,26.331362,26.528599,26.213018,26.282051,21.732542,10622200
+2004-03-04,26.291914,26.617357,26.291914,26.429979,21.854864,7671700
+2004-03-05,26.390533,26.479290,26.045364,26.114399,21.593908,8569000
+2004-03-08,26.183432,26.282051,25.838264,25.877712,21.398193,6778700
+2004-03-09,25.877712,26.045364,25.621302,25.867849,21.390041,8952700
+2004-03-10,25.788954,25.897436,25.069033,25.098619,20.753965,13672100
+2004-03-11,25.098619,25.167652,24.408283,24.605522,20.346224,13090300
+2004-03-12,24.832348,25.739645,24.556213,25.739645,21.284031,15640200
+2004-03-15,25.049309,25.138067,24.753452,24.980276,20.656109,11250800
+2004-03-16,25.000000,25.147928,24.871796,25.049309,20.713190,9537800
+2004-03-17,25.108480,25.591717,25.009861,25.443787,21.039381,6610200
+2004-03-18,25.493097,25.700197,25.226824,25.374754,20.982302,7418800
+2004-03-19,25.384615,25.581854,25.000000,25.039448,20.705040,8138300
+2004-03-22,24.753452,24.921104,24.526627,24.556213,20.305450,8651300
+2004-03-23,24.753452,25.000000,24.566074,24.753452,20.468550,7542200
+2004-03-24,24.753452,24.753452,24.211046,24.428007,20.199436,8694000
+2004-03-25,24.635109,24.812624,24.467455,24.704142,20.427769,7138200
+2004-03-26,24.654833,24.891520,24.566074,24.753452,20.468550,5353600
+2004-03-29,24.802761,24.921104,24.714005,24.852072,20.550097,4740700
+2004-03-30,24.802761,24.881657,24.704142,24.832348,20.533789,4772900
+2004-03-31,24.881657,24.911243,24.556213,24.644970,20.378845,8254000
+2004-04-01,24.753452,25.029585,23.954636,25.000000,20.672421,8353000
+2004-04-02,25.522682,25.631165,25.197239,25.591717,21.161707,9083600
+2004-04-05,25.463511,26.213018,25.355030,26.134123,21.610224,8190300
+2004-04-06,25.936884,26.282051,25.867849,26.222879,21.683611,6812900
+2004-04-07,25.976332,26.104536,25.650888,25.650888,21.210632,6405300
+2004-04-08,26.124260,26.124260,25.562130,25.887573,21.406351,4121700
+2004-04-12,25.493097,25.542406,25.157791,25.345167,20.957834,6512700
+2004-04-13,25.295858,25.384615,24.615385,24.654833,20.387003,10038900
+2004-04-14,24.556213,24.792900,24.457594,24.654833,20.387003,9549300
+2004-04-15,24.802761,24.832348,24.289940,24.358974,20.142357,7706800
+2004-04-16,24.358974,24.556213,24.092703,24.556213,20.305450,11860500
+2004-04-19,24.280079,24.743589,24.250494,24.615385,20.354380,5869600
+2004-04-20,24.714005,24.940828,24.398422,24.457594,20.223907,8470100
+2004-04-21,24.358974,24.694281,24.309664,24.654833,20.387003,8373500
+2004-04-22,24.753452,24.901381,24.536489,24.536489,20.289146,7475500
+2004-04-23,24.536489,24.595661,24.220907,24.309664,20.101585,8112800
+2004-04-26,24.378698,24.516766,23.767258,24.043392,19.881399,9160900
+2004-04-27,24.161736,24.201183,23.846153,23.846153,19.718307,9649600
+2004-04-28,23.422091,23.619329,23.274162,23.619329,19.530745,28457900
+2004-04-29,23.865877,24.063116,23.353058,23.372782,19.326874,17110100
+2004-04-30,23.372782,23.422091,22.583826,22.712032,18.780502,17002600
+2004-05-03,22.859961,22.928993,22.633137,22.810652,18.862053,9414100
+2004-05-04,22.633137,22.938856,22.544378,22.702169,18.772348,10834500
+2004-05-05,22.682446,23.037476,22.534517,22.682446,18.756041,10131900
+2004-05-06,22.495070,22.652861,22.159763,22.435898,18.552168,9464500
+2004-05-07,22.238659,22.603550,22.021696,22.140039,18.307529,7844200
+2004-05-10,21.804733,22.287968,21.094675,21.794872,18.022106,12050700
+2004-05-11,22.287968,22.879684,22.278107,22.662722,18.739727,14417000
+2004-05-12,22.672585,22.879684,22.337278,22.682446,18.756041,10151300
+2004-05-13,23.057199,23.254438,22.781065,22.978304,19.000683,18064300
+2004-05-14,23.027613,23.126232,22.741617,22.919132,18.951754,7106200
+2004-05-17,22.633137,22.672585,22.376726,22.583826,18.674494,6660700
+2004-05-18,22.771204,22.978304,22.524654,22.524654,18.625568,5237700
+2004-05-19,22.761341,23.086784,22.534517,22.682446,18.756041,8092000
+2004-05-20,22.593689,22.859961,22.544378,22.731756,18.796816,4252800
+2004-05-21,22.879684,23.037476,22.366863,22.366863,18.495085,7055800
+2004-05-24,22.583826,22.928993,22.504930,22.879684,18.919134,5980900
+2004-05-25,22.879684,23.422091,22.712032,23.422091,19.367649,5406800
+2004-05-26,23.333334,23.520710,23.205128,23.333334,19.294258,5472500
+2004-05-27,23.520710,23.599606,23.284023,23.382643,19.335032,5806600
+2004-05-28,23.471401,23.540434,23.145956,23.145956,19.139317,3881100
+2004-06-01,23.471401,23.609467,23.106508,23.510849,19.441044,7338400
+2004-06-02,23.619329,24.112427,23.057199,24.082840,19.914019,8756100
+2004-06-03,23.915188,24.181459,23.629190,23.639053,19.547050,6511500
+2004-06-04,23.816568,24.043392,23.678501,23.727810,19.620449,4917000
+2004-06-07,23.915188,24.181459,23.648914,24.063116,19.897711,6580600
+2004-06-08,24.112427,24.270218,23.994083,24.260355,20.060806,5969000
+2004-06-09,24.260355,24.526627,24.132151,24.240631,20.044497,7254900
+2004-06-10,24.181459,24.506903,24.171598,24.378698,20.158665,5604400
+2004-06-14,24.211046,24.447731,24.191322,24.378698,20.158665,7389100
+2004-06-15,24.378698,24.556213,24.240631,24.358974,20.142357,6140800
+2004-06-16,24.309664,24.595661,24.220907,24.526627,20.280985,6156800
+2004-06-17,24.428007,24.654833,24.358974,24.516766,20.272831,4897800
+2004-06-18,24.526627,24.674557,24.388559,24.674557,20.403309,7330600
+2004-06-21,24.556213,24.664694,24.428007,24.605522,20.346224,6368300
+2004-06-22,24.428007,24.605522,24.408283,24.575937,20.321762,6762400
+2004-06-23,24.467455,24.605522,23.964497,24.309664,20.101585,8472000
+2004-06-24,24.358974,25.078896,24.220907,24.990139,20.664265,17940000
+2004-06-25,24.852072,25.069033,24.743589,24.832348,20.533789,7795300
+2004-06-28,24.842209,25.216963,24.783037,24.891520,20.582714,5434500
+2004-06-29,24.901381,25.216963,24.812624,25.138067,20.786587,5304600
+2004-06-30,25.147928,25.246548,24.990139,25.138067,20.786587,6253700
+2004-07-01,24.990139,25.147928,24.763313,24.930967,20.615335,6083300
+2004-07-02,25.000000,25.069033,24.792900,24.921104,20.607180,4261700
+2004-07-06,24.921104,24.921104,24.339251,24.358974,20.142357,5907300
+2004-07-07,24.408283,24.526627,24.358974,24.388559,20.166821,4481000
+2004-07-08,24.309664,24.408283,24.003944,24.013807,19.856941,4898900
+2004-07-09,24.260355,24.309664,23.984221,24.260355,20.060806,5811300
+2004-07-12,24.230770,24.388559,23.964497,24.013807,19.856941,5899100
+2004-07-13,23.816568,24.122288,23.816568,23.895464,19.759075,3803500
+2004-07-14,23.737673,24.063116,23.688362,23.856016,19.726463,4426500
+2004-07-15,23.816568,23.885601,23.530573,23.540434,19.465509,4519800
+2004-07-16,23.757397,23.757397,23.086784,23.126232,19.123007,6912500
+2004-07-19,23.076923,23.244576,22.781065,23.165680,19.155622,7626100
+2004-07-20,23.106508,23.609467,23.076923,23.570021,19.489971,7145200
+2004-07-21,23.570021,23.609467,22.879684,23.313610,19.277950,6016400
+2004-07-22,22.830376,23.126232,22.633137,22.988165,19.008837,6909300
+2004-07-23,22.958580,23.106508,22.564102,22.642998,18.723423,6000700
+2004-07-26,22.633137,22.820513,22.376726,22.534517,18.633718,5298100
+2004-07-27,22.633137,22.771204,22.524654,22.633137,18.715269,5881200
+2004-07-28,22.583826,22.800789,22.258383,22.524654,18.625568,5682500
+2004-07-29,22.583826,22.899408,22.583826,22.682446,18.756041,4121700
+2004-07-30,22.583826,22.830376,22.495070,22.771204,18.829430,4829600
+2004-08-02,22.781065,22.859961,22.583826,22.781065,18.837589,5221800
+2004-08-03,22.702169,22.761341,22.376726,22.455622,18.568483,4813400
+2004-08-04,22.287968,22.504930,22.041420,22.179487,18.340145,6766500
+2004-08-05,22.337278,22.435898,21.942801,21.972387,18.168896,6142800
+2004-08-06,21.725838,21.942801,21.548323,21.686390,17.932404,6310000
+2004-08-09,21.686390,21.863905,21.538462,21.637081,17.891630,5258200
+2004-08-10,21.676529,22.278107,21.646942,22.130178,18.299376,9330000
+2004-08-11,21.913216,21.913216,21.222879,21.479290,17.761154,12022000
+2004-08-12,21.203156,21.291914,20.710060,20.917160,17.296333,13050700
+2004-08-13,20.857988,21.104536,20.591717,20.601578,17.035378,10863600
+2004-08-16,20.710060,21.439842,20.670612,21.252466,17.573593,11475500
+2004-08-17,21.252466,21.765285,21.252466,21.676529,17.924250,6828900
+2004-08-18,21.646942,22.021696,21.449703,21.952663,18.152584,4776100
+2004-08-19,21.893492,22.110455,21.814596,22.090731,18.266756,4996100
+2004-08-20,21.942801,22.199211,21.923077,22.130178,18.299376,6501400
+2004-08-23,22.041420,22.317554,21.844181,21.863905,18.079191,4156200
+2004-08-24,22.090731,22.287968,21.893492,22.001972,18.193357,5288900
+2004-08-25,21.903353,22.376726,21.903353,22.189348,18.348299,4877500
+2004-08-26,22.258383,22.465483,22.169624,22.366863,18.495085,2890200
+2004-08-27,22.287968,22.435898,22.080868,22.347139,18.478773,3744800
+2004-08-30,22.189348,22.327415,22.071007,22.090731,18.266756,3716000
+2004-08-31,22.218935,22.278107,22.021696,22.140039,18.307529,5102700
+2004-09-01,22.149900,22.218935,21.982248,22.169624,18.331989,4672100
+2004-09-02,22.041420,22.268244,21.923077,22.228796,18.380917,5101200
+2004-09-03,22.189348,22.376726,21.992111,22.061144,18.242290,4263700
+2004-09-07,22.051283,22.475346,22.051283,22.376726,18.503242,5707100
+2004-09-08,22.544378,22.761341,22.455622,22.573965,18.666334,6233500
+2004-09-09,22.564102,22.721893,22.495070,22.544378,18.641876,6006500
+2004-09-10,22.702169,23.067060,22.583826,22.840237,18.886518,6515600
+2004-09-13,22.889545,23.155819,22.879684,22.998028,19.016993,4228900
+2004-09-14,22.998028,23.057199,22.682446,22.928993,18.959909,6721600
+2004-09-15,22.928993,22.978304,22.613413,22.692308,18.764196,6507600
+2004-09-16,22.613413,23.254438,22.613413,23.076923,19.082233,5067900
+2004-09-17,23.323471,23.323471,23.067060,23.096647,19.098541,5722600
+2004-09-20,22.938856,23.106508,22.682446,22.810652,18.862053,6169200
+2004-09-21,22.978304,23.096647,22.682446,23.086784,19.090385,5222500
+2004-09-22,22.958580,23.175543,22.889545,22.978304,19.000683,4665000
+2004-09-23,22.879684,23.165680,22.840237,22.938856,18.968063,5382500
+2004-09-24,22.879684,23.264299,22.879684,23.136095,19.131163,3957200
+2004-09-27,22.938856,23.027613,22.790928,22.850100,18.894670,4819900
+2004-09-28,22.830376,22.948717,22.041420,22.287968,18.429846,13162000
+2004-09-29,22.297831,22.485207,21.696253,22.485207,18.592943,11481300
+2004-09-30,22.238659,22.445759,22.080868,22.238659,18.389072,9101800
+2004-10-01,22.248520,22.850100,22.199211,22.761341,18.821276,6011200
+2004-10-04,23.412230,23.412230,22.869822,23.214991,19.196396,7267300
+2004-10-05,23.254438,23.658777,23.195267,23.550297,19.473663,8128200
+2004-10-06,23.639053,24.013807,23.451677,24.003944,19.848782,9220000
+2004-10-07,24.033531,24.615385,23.944773,24.408283,20.183132,13855200
+2004-10-08,24.408283,24.704142,24.220907,24.506903,20.264677,10229500
+2004-10-11,24.506903,24.664694,24.437870,24.556213,20.305450,5528800
+2004-10-12,24.309664,24.516766,24.240631,24.497042,20.256525,9510000
+2004-10-13,24.487179,24.753452,24.487179,24.664694,20.395155,9928900
+2004-10-14,24.664694,24.684418,24.329388,24.506903,20.264677,8905600
+2004-10-15,24.477318,24.664694,24.260355,24.566074,20.313601,8777700
+2004-10-18,24.654833,24.802761,24.635109,24.733728,20.452240,6604800
+2004-10-19,24.704142,24.763313,24.467455,24.546350,20.297298,6310200
+2004-10-20,24.437870,24.556213,24.289940,24.398422,20.174974,5588200
+2004-10-21,24.378698,24.950691,24.161736,24.950691,20.631643,7963600
+2004-10-22,24.753452,24.783037,24.211046,24.280079,20.077118,5833300
+2004-10-25,24.072979,24.260355,23.934912,24.250494,20.052656,5801200
+2004-10-26,24.161736,24.585798,24.023668,24.526627,20.280985,5981000
+2004-10-27,24.418146,24.635109,24.161736,24.625246,20.362534,5273300
+2004-10-28,24.625246,24.852072,24.437870,24.773176,20.484861,4966600
+2004-10-29,24.654833,24.911243,24.556213,24.871796,20.566410,6364800
+2004-11-01,24.783037,25.069033,24.447731,24.881657,20.574562,5374000
+2004-11-02,24.625246,25.502958,24.625246,25.108480,20.762121,8448300
+2004-11-03,25.542406,25.571993,25.108480,25.285995,20.908907,6034000
+2004-11-04,25.374754,26.025640,25.325443,25.956608,21.463432,6879600
+2004-11-05,25.956608,26.301775,25.788954,26.065088,21.553139,7305000
+2004-11-08,25.788954,25.936884,25.601578,25.680473,21.235098,5573900
+2004-11-09,25.641026,26.124260,25.641026,25.907297,21.422659,5874700
+2004-11-10,26.035503,26.489151,25.996056,26.291914,21.740700,7941900
+2004-11-11,26.429979,26.429979,25.986193,26.222879,21.683611,6625100
+2004-11-12,26.183432,26.587770,26.104536,26.429979,21.854864,5177900
+2004-11-15,26.489151,26.597633,26.262327,26.360947,21.797779,4973700
+2004-11-16,26.183432,26.390533,26.055227,26.163708,21.634686,5976400
+2004-11-17,26.183432,26.508875,26.104536,26.193295,21.659149,6226900
+2004-11-18,26.104536,26.321499,25.956608,26.005917,21.504206,6068000
+2004-11-19,26.528599,26.706114,26.134123,26.291914,21.740700,9301000
+2004-11-22,26.311638,26.844181,26.134123,26.814596,22.172903,6697100
+2004-11-23,26.824457,26.992111,26.735701,26.942801,22.278913,6172100
+2004-11-24,27.021696,27.071007,26.834320,26.962524,22.295225,3397300
+2004-11-26,26.854044,26.962524,26.528599,26.745562,22.115816,1487900
+2004-11-29,26.962524,27.061144,26.331362,26.686390,22.066891,5666100
+2004-11-30,26.607494,26.873768,26.508875,26.508875,21.920099,4972000
+2004-12-01,26.597633,27.396450,26.587770,27.297831,22.572487,7990600
+2004-12-02,27.297831,27.603550,27.228796,27.238659,22.523558,5923100
+2004-12-03,27.140039,27.366863,26.903353,26.992111,22.319687,5588800
+2004-12-06,26.992111,27.120316,26.824457,26.942801,22.278913,3900100
+2004-12-07,26.863905,27.100592,26.617357,26.725838,22.099512,5971300
+2004-12-08,26.696253,26.824457,26.311638,26.824457,22.379250,6283600
+2004-12-09,26.824457,27.258383,26.775148,27.238659,22.724815,7446300
+2004-12-10,26.637081,27.593689,26.637081,27.248520,22.733042,4063000
+2004-12-13,27.258383,27.406311,27.209072,27.347139,22.815319,5122600
+2004-12-14,27.347139,27.357002,27.199211,27.278107,22.757727,5529900
+2004-12-15,27.120316,27.327415,27.100592,27.218935,22.708359,6937200
+2004-12-16,27.218935,27.475346,27.051283,27.169624,22.667219,6130800
+2004-12-17,27.179487,27.366863,26.972387,26.992111,22.519123,8176400
+2004-12-20,27.051283,27.248520,26.854044,27.041420,22.560259,4615600
+2004-12-21,26.992111,27.268244,26.913216,27.100592,22.609631,4383500
+2004-12-22,27.189348,27.268244,27.001972,27.248520,22.733042,4295800
+2004-12-23,27.179487,27.297831,27.140039,27.209072,22.700129,2941600
+2004-12-27,27.278107,27.465483,27.209072,27.366863,22.831774,2947900
+2004-12-28,27.386587,27.603550,27.307692,27.603550,23.029238,3353000
+2004-12-29,27.495070,27.593689,27.435898,27.564102,22.996328,3598700
+2004-12-30,27.593689,27.642998,27.495070,27.495070,22.938732,3950200
+2004-12-31,27.593689,27.613413,27.376726,27.416174,22.872910,2452500
+2005-01-03,27.426035,27.869822,27.426035,27.465483,22.914049,5700700
+2005-01-04,27.830376,27.830376,27.140039,27.169624,22.667219,5895800
+2005-01-05,27.366863,27.366863,27.011835,27.021696,22.543806,5456300
+2005-01-06,27.051283,27.475346,26.982248,27.061144,22.576715,5608600
+2005-01-07,27.120316,27.159763,26.676529,26.794872,22.354568,6626400
+2005-01-10,26.775148,27.001972,26.725838,26.903353,22.445076,4511600
+2005-01-11,26.785009,27.061144,26.735701,26.834320,22.387480,7644300
+2005-01-12,26.844181,27.495070,26.785009,27.337278,22.807095,7920700
+2005-01-13,27.495070,27.495070,27.248520,27.366863,22.831774,6745500
+2005-01-14,27.692308,28.106508,27.662722,27.909269,23.284296,12506700
+2005-01-18,27.919132,28.451677,27.859961,28.343195,23.646313,9680300
+2005-01-19,28.372782,28.540434,28.067060,28.106508,23.448845,8634700
+2005-01-20,28.007889,28.313610,27.859961,28.126232,23.465303,5965600
+2005-01-21,28.126232,28.195267,27.761341,27.761341,23.160883,6258800
+2005-01-24,27.761341,27.879684,27.514793,27.564102,22.996328,6123300
+2005-01-25,27.790928,28.451677,27.564102,28.402367,23.695677,10046800
+2005-01-26,28.510849,28.510849,27.919132,28.185404,23.514673,7492900
+2005-01-27,28.027613,28.165680,27.672585,27.909269,23.284296,7244000
+2005-01-28,27.810652,27.988165,27.573965,27.840237,23.226702,7797900
+2005-01-31,28.205128,28.451677,28.057199,28.234715,23.555811,6914800
+2005-02-01,28.353058,28.570021,28.037476,28.402367,23.695677,10890900
+2005-02-02,28.491125,28.500986,28.284023,28.431953,23.720367,7764500
+2005-02-03,28.461538,28.461538,28.126232,28.402367,23.695677,5207500
+2005-02-04,28.372782,28.954636,28.274162,28.905325,24.115288,9381700
+2005-02-07,29.013807,29.487179,28.856016,29.467455,24.584269,9401400
+2005-02-08,29.467455,29.575937,29.211046,29.428007,24.551355,6021000
+2005-02-09,29.339251,29.418146,28.944773,28.944773,24.148201,6624500
+2005-02-10,29.122288,29.161736,28.816568,28.944773,24.148201,5477100
+2005-02-11,28.836292,29.132151,28.579882,28.934912,24.139973,6099400
+2005-02-14,28.836292,29.033531,28.599606,28.984221,24.181107,5148200
+2005-02-15,28.905325,29.171598,28.836292,29.171598,24.337440,5095900
+2005-02-16,29.092703,29.092703,28.865877,28.925049,24.131748,4132300
+2005-02-17,28.836292,29.063116,28.747534,28.944773,24.148201,4826000
+2005-02-18,28.954636,28.964497,28.658777,28.658777,23.909603,5196000
+2005-02-22,28.441814,28.599606,28.027613,28.037476,23.391254,7573400
+2005-02-23,28.037476,28.441814,27.712032,28.254438,23.572264,6046900
+2005-02-24,28.145956,28.372782,27.702169,27.761341,23.160883,8556400
+2005-02-25,27.712032,28.106508,27.130178,27.504930,22.946962,10208300
+2005-02-28,27.357002,27.554241,27.218935,27.554241,22.988100,6598100
+2005-03-01,27.583826,28.116371,27.514793,28.086784,23.432392,7577300
+2005-03-02,28.007889,28.451677,27.909269,28.293886,23.605179,4695900
+2005-03-03,28.293886,28.372782,28.017752,28.175543,23.506445,6932200
+2005-03-04,28.353058,28.570021,28.214991,28.481262,23.761499,6137600
+2005-03-07,28.471401,28.599606,28.353058,28.560158,23.827320,4113100
+2005-03-08,28.353058,28.362919,28.037476,28.136095,23.473530,6435400
+2005-03-09,27.988165,28.126232,27.307692,27.593689,23.021008,9299100
+2005-03-10,27.495070,27.820513,27.426035,27.613413,23.037466,4979600
+2005-03-11,27.731756,27.820513,27.090731,27.209072,22.700129,6971300
+2005-03-14,27.613413,27.712032,27.248520,27.633137,23.053921,7987200
+2005-03-15,27.909269,28.570021,27.909269,28.205128,23.531122,10866800
+2005-03-16,28.057199,28.520710,28.007889,28.254438,23.572264,9860900
+2005-03-17,28.451677,28.491125,27.928993,27.978304,23.341890,6480700
+2005-03-18,28.047337,28.047337,27.435898,27.702169,23.111513,11750000
+2005-03-21,27.830376,27.998028,27.514793,27.790928,23.185564,4424500
+2005-03-22,27.692308,28.017752,27.396450,27.396450,22.856455,5138300
+2005-03-23,27.524654,27.642998,27.347139,27.396450,22.856455,5261400
+2005-03-24,27.416174,27.761341,27.366863,27.366863,22.831774,4842800
+2005-03-28,27.544378,28.027613,27.524654,27.781065,23.177334,5192700
+2005-03-29,27.712032,27.859961,27.416174,27.514793,22.955189,6246100
+2005-03-30,27.613413,28.057199,27.583826,27.958580,23.325434,4925300
+2005-03-31,28.007889,28.451677,27.859961,28.333334,23.638086,6445500
+2005-04-01,28.392506,28.491125,28.116371,28.185404,23.514673,5262300
+2005-04-04,28.254438,28.274162,27.928993,28.037476,23.391254,4674400
+2005-04-05,27.978304,28.264299,27.919132,28.145956,23.481758,4475200
+2005-04-06,28.254438,28.274162,27.948717,28.017752,23.374804,3759700
+2005-04-07,28.017752,28.313610,27.958580,28.303747,23.613401,4781200
+2005-04-08,28.244576,28.599606,27.909269,28.007889,23.366571,4550800
+2005-04-11,27.998028,28.067060,27.662722,27.741617,23.144424,4417200
+2005-04-12,27.712032,27.820513,27.337278,27.751480,23.152651,6183500
+2005-04-13,27.642998,27.909269,27.317554,27.317554,22.790634,4382000
+2005-04-14,27.258383,27.495070,27.140039,27.248520,22.733042,5632900
+2005-04-15,27.179487,27.366863,26.952663,26.992111,22.519123,7609500
+2005-04-18,26.627218,26.923077,26.568047,26.568047,22.165335,6057200
+2005-04-19,26.479290,26.913216,26.479290,26.775148,22.338114,6152800
+2005-04-20,26.577909,26.854044,26.232742,26.252466,21.902052,5370100
+2005-04-21,26.439842,27.366863,26.341223,27.287968,22.765951,7222800
+2005-04-22,27.218935,27.327415,26.568047,26.923077,22.461529,4964200
+2005-04-25,26.962524,27.041420,26.676529,26.824457,22.379250,4363000
+2005-04-26,26.735701,26.745562,26.429979,26.429979,22.050146,5809400
+2005-04-27,26.331362,26.528599,26.143984,26.469427,22.083055,5512800
+2005-04-28,26.232742,26.321499,25.463511,25.542406,21.309656,9951100
+2005-04-29,25.690336,26.035503,25.355030,26.035503,21.721043,8373900
+2005-05-02,26.203156,26.351086,25.749506,25.966469,21.663443,6492200
+2005-05-03,25.966469,26.084812,25.660749,25.887573,21.597624,7075600
+2005-05-04,25.887573,26.449703,25.818541,26.420118,22.041914,5220800
+2005-05-05,26.331362,26.587770,26.262327,26.341223,21.976093,4438900
+2005-05-06,26.577909,26.775148,26.439842,26.518738,22.124193,5811900
+2005-05-09,26.577909,26.804733,26.577909,26.686390,22.264063,5959700
+2005-05-10,26.528599,26.696253,26.311638,26.577909,22.173563,5370500
+2005-05-11,26.607494,27.396450,25.887573,26.301775,21.943186,10822600
+2005-05-12,26.577909,26.715977,26.252466,26.380671,22.009007,7840800
+2005-05-13,26.538462,26.725838,26.390533,26.627218,22.214697,7180300
+2005-05-16,26.676529,26.794872,26.558186,26.696253,22.272289,6106700
+2005-05-17,26.577909,27.021696,26.538462,26.903353,22.445076,6309700
+2005-05-18,27.061144,27.218935,26.785009,27.120316,22.626083,7476300
+2005-05-19,27.307692,27.633137,27.268244,27.554241,22.988100,6663200
+2005-05-20,27.613413,27.613413,27.416174,27.445759,22.897594,6159000
+2005-05-23,27.386587,27.445759,27.149900,27.169624,22.667219,8415200
+2005-05-24,27.071007,27.357002,27.041420,27.278107,22.757727,6052200
+2005-05-25,27.031559,27.366863,26.923077,27.268244,22.749496,6819900
+2005-05-26,27.416174,27.544378,27.357002,27.416174,22.872910,5111800
+2005-05-27,27.376726,27.613413,27.347139,27.495070,22.938732,3823100
+2005-05-31,27.297831,27.337278,27.041420,27.061144,22.576715,6229200
+2005-06-01,27.130178,27.426035,26.982248,27.199211,22.691906,5883600
+2005-06-02,27.199211,27.218935,27.011835,27.061144,22.576715,5426700
+2005-06-03,26.972387,27.061144,26.873768,26.942801,22.477987,5512300
+2005-06-06,26.893492,27.080868,26.686390,26.706114,22.280521,6630200
+2005-06-07,26.775148,27.120316,26.775148,26.913216,22.453300,7214400
+2005-06-08,27.021696,27.238659,26.982248,27.031559,22.552034,5918000
+2005-06-09,26.972387,27.278107,26.952663,27.169624,22.667219,4984800
+2005-06-10,27.169624,27.327415,26.331362,27.140039,22.642538,6144100
+2005-06-13,27.021696,27.495070,26.923077,27.376726,22.839998,5973000
+2005-06-14,27.465483,27.603550,27.218935,27.258383,22.741268,6910100
+2005-06-15,27.416174,27.416174,26.607494,26.666666,22.247610,10493700
+2005-06-16,26.696253,26.824457,26.400394,26.410257,22.033688,6877100
+2005-06-17,26.804733,26.804733,26.232742,26.331362,21.967865,9605400
+2005-06-20,26.163708,26.331362,26.045364,26.222879,21.877363,5529000
+2005-06-21,26.311638,26.518738,26.203156,26.360947,21.992548,3889900
+2005-06-22,26.528599,26.627218,26.232742,26.321499,21.959639,4640200
+2005-06-23,26.301775,26.429979,25.887573,25.907297,21.614079,4408800
+2005-06-24,25.650888,25.927021,25.325443,25.680473,21.424845,6237900
+2005-06-27,25.680473,25.788954,25.118343,25.276134,21.087507,6919700
+2005-06-28,25.404339,25.690336,25.345167,25.502958,21.276745,6328600
+2005-06-29,25.404339,25.483234,25.019724,25.078896,20.922956,8081900
+2005-06-30,25.325443,25.394478,24.773176,24.832348,20.717264,7741900
+2005-07-01,24.871796,25.138067,24.615385,24.743589,20.643211,5246400
+2005-07-05,24.733728,24.901381,24.526627,24.694281,20.602074,5699000
+2005-07-06,24.585798,24.714005,24.408283,24.447731,20.396381,7364200
+2005-07-07,24.132151,24.319527,24.013807,24.112427,20.116644,10412400
+2005-07-08,24.072979,24.911243,24.043392,24.654833,20.569164,10450400
+2005-07-11,25.000000,25.118343,24.684418,24.832348,20.717264,8899400
+2005-07-12,24.901381,25.364891,24.704142,25.059172,20.906502,10279000
+2005-07-13,25.236687,25.522682,25.049309,25.355030,21.153332,7602100
+2005-07-14,25.562130,26.134123,25.493097,25.966469,21.663443,12601600
+2005-07-15,26.035503,26.104536,25.887573,26.015779,21.704586,7716600
+2005-07-18,25.966469,26.084812,25.650888,25.690336,21.433071,7287000
+2005-07-19,25.660749,25.769230,25.256411,25.404339,21.194466,8689200
+2005-07-20,25.414202,25.512821,25.207100,25.473373,21.252062,6155400
+2005-07-21,25.493097,25.493097,25.147928,25.207100,21.029915,5525900
+2005-07-22,25.177515,25.502958,25.157791,25.473373,21.252062,4500700
+2005-07-25,25.473373,25.571993,25.236687,25.285995,21.095737,3798100
+2005-07-26,25.364891,25.788954,25.305719,25.522682,21.293196,6004900
+2005-07-27,25.828402,25.828402,25.256411,25.473373,21.252062,5699600
+2005-07-28,25.345167,25.749506,25.345167,25.591717,21.350794,4332900
+2005-07-29,25.532545,25.621302,25.276134,25.285995,21.095737,3810700
+2005-08-01,25.285995,25.433926,25.167652,25.256411,21.071054,5380100
+2005-08-02,25.226824,25.295858,25.098619,25.207100,21.029915,4828800
+2005-08-03,25.315582,25.788954,25.167652,25.414202,21.202702,7817000
+2005-08-04,25.394478,25.473373,25.207100,25.266272,21.079281,4040100
+2005-08-05,25.147928,25.236687,24.950691,25.128204,20.964092,3975400
+2005-08-08,25.098619,25.404339,24.940828,25.059172,20.906502,4150200
+2005-08-09,25.285995,25.897436,25.285995,25.779093,21.507124,10842100
+2005-08-10,25.246548,25.591717,24.921104,25.118343,20.955866,12619300
+2005-08-11,25.049309,25.542406,25.049309,25.522682,21.293196,8427900
+2005-08-12,25.473373,25.522682,25.167652,25.404339,21.194466,6490600
+2005-08-15,25.493097,25.897436,25.364891,25.808678,21.531805,9074600
+2005-08-16,25.838264,25.996056,25.719921,25.897436,21.605848,8496800
+2005-08-17,25.808678,26.104536,25.710060,25.927021,21.630531,5835900
+2005-08-18,25.808678,25.897436,25.650888,25.719921,21.457756,6348200
+2005-08-19,25.739645,25.838264,25.562130,25.611441,21.367250,5777500
+2005-08-22,25.552269,25.611441,25.147928,25.374754,21.169786,9465700
+2005-08-23,25.276134,25.493097,25.197239,25.443787,21.227381,7164700
+2005-08-24,25.355030,25.443787,25.049309,25.049309,20.898270,7806400
+2005-08-25,25.138067,25.138067,24.802761,24.852072,20.733721,7693300
+2005-08-26,24.901381,25.128204,24.812624,24.980276,20.840679,6529200
+2005-08-29,24.783037,25.108480,24.753452,25.000000,20.857134,6100300
+2005-08-30,24.792900,25.009861,24.783037,24.940828,20.807768,10782700
+2005-08-31,24.901381,24.901381,24.575937,24.842209,20.725492,9615200
+2005-09-01,24.743589,24.822485,24.309664,24.349112,20.314108,10267900
+2005-09-02,24.349112,24.585798,24.211046,24.418146,20.371700,6456100
+2005-09-06,24.457594,24.911243,24.457594,24.822485,20.709036,7267400
+2005-09-07,24.694281,24.990139,24.684418,24.871796,20.750172,7046300
+2005-09-08,24.694281,24.802761,24.556213,24.605522,20.528027,9236700
+2005-09-09,24.605522,24.950691,24.497042,24.694281,20.602074,8597700
+2005-09-12,24.723866,24.812624,24.566074,24.773176,20.667898,5972900
+2005-09-13,24.773176,24.783037,24.447731,24.467455,20.412840,5971500
+2005-09-14,24.467455,24.635109,23.520710,23.777121,19.836905,13316700
+2005-09-15,23.688362,23.777121,23.570021,23.668638,19.746397,14435400
+2005-09-16,23.767258,23.905325,23.688362,23.777121,19.836905,11916400
+2005-09-19,23.668638,23.727810,23.471401,23.570021,19.664125,8086700
+2005-09-20,23.570021,23.846153,23.412230,23.422091,19.540709,10530900
+2005-09-21,23.264299,23.343195,22.998028,23.007889,19.195145,11683500
+2005-09-22,22.909269,23.027613,22.583826,22.909269,19.112871,11443600
+2005-09-23,22.800789,22.958580,22.652861,22.899408,19.104639,10058500
+2005-09-26,23.076923,23.116371,22.810652,22.938856,19.137554,7326100
+2005-09-27,22.978304,23.165680,22.781065,23.037476,19.219830,8808000
+2005-09-28,23.017752,23.244576,22.899408,23.047337,19.228054,8473100
+2005-09-29,23.047337,23.668638,22.958580,23.629190,19.713490,11698100
+2005-09-30,23.708086,24.013807,23.688362,23.796844,19.853357,8199700
+2005-10-03,23.796844,23.885601,23.629190,23.678501,19.754629,5865000
+2005-10-04,24.280079,24.654833,23.727810,24.220907,20.207151,15721000
+2005-10-05,24.063116,24.230770,23.885601,23.895464,19.935637,9096100
+2005-10-06,23.915188,24.260355,23.589743,23.757397,19.820448,9977300
+2005-10-07,23.767258,23.856016,23.688362,23.767258,19.828676,5814300
+2005-10-10,23.708086,23.786982,23.510849,23.550297,19.647669,6802900
+2005-10-11,23.491125,23.757397,23.372782,23.441814,19.557161,8462900
+2005-10-12,23.422091,23.629190,22.879684,23.017752,19.203371,12678800
+2005-10-13,22.830376,23.136095,22.761341,23.106508,19.277420,8469300
+2005-10-14,23.086784,23.214991,22.879684,23.067060,19.244507,6543500
+2005-10-17,22.919132,23.224852,22.810652,23.165680,19.326786,7327100
+2005-10-18,23.165680,23.214991,22.869822,23.067060,19.244507,9324400
+2005-10-19,22.899408,23.155819,22.583826,23.116371,19.285650,8652800
+2005-10-20,23.007889,23.116371,22.573965,22.642998,18.890720,7726800
+2005-10-21,22.781065,23.067060,22.583826,22.662722,18.907177,8885000
+2005-10-24,22.662722,23.027613,22.652861,22.978304,19.170464,8427800
+2005-10-25,22.850100,23.284023,22.830376,23.195267,19.351469,8799600
+2005-10-26,23.224852,23.520710,23.165680,23.254438,19.400835,6938200
+2005-10-27,23.431953,23.471401,23.027613,23.096647,19.269194,6266100
+2005-10-28,23.205128,23.668638,23.175543,23.491125,19.598299,8806100
+2005-10-31,23.668638,24.161736,23.648914,24.033531,20.050823,10389100
+2005-11-01,23.944773,24.467455,23.944773,24.319527,20.289425,12013300
+2005-11-02,24.260355,24.526627,24.220907,24.477318,20.421066,7457700
+2005-11-03,24.635109,24.654833,24.289940,24.556213,20.486893,9575800
+2005-11-04,24.654833,24.704142,24.250494,24.467455,20.412840,6896400
+2005-11-07,24.654833,24.852072,24.526627,24.812624,20.700806,7906300
+2005-11-08,24.822485,24.911243,24.723866,24.812624,20.700806,7533100
+2005-11-09,24.812624,25.187376,24.812624,25.069033,20.914730,8770600
+2005-11-10,25.049309,25.443787,25.039448,25.355030,21.153332,7591700
+2005-11-11,25.355030,25.641026,25.276134,25.502958,21.276745,8287800
+2005-11-14,25.443787,25.828402,25.394478,25.650888,21.400160,10542000
+2005-11-15,25.571993,25.749506,25.345167,25.700197,21.441298,10912400
+2005-11-16,25.660749,25.798817,25.256411,25.493097,21.268520,8957400
+2005-11-17,25.384615,25.739645,25.345167,25.631165,21.383707,9870200
+2005-11-18,25.147928,25.325443,24.674557,24.852072,20.733721,20659200
+2005-11-21,24.704142,24.822485,24.418146,24.704142,20.610306,9389500
+2005-11-22,24.575937,24.654833,24.388559,24.595661,20.519800,9308500
+2005-11-23,24.783037,24.802761,24.566074,24.615385,20.536251,7701400
+2005-11-25,24.674557,24.842209,24.615385,24.704142,20.610306,2120900
+2005-11-28,24.842209,24.852072,24.497042,24.674557,20.585623,5973800
+2005-11-29,24.714005,24.891520,24.654833,24.733728,20.634989,7619100
+2005-11-30,24.753452,24.753452,24.457594,24.585798,20.511572,9727800
+2005-12-01,24.704142,24.763313,24.556213,24.615385,20.536251,7985100
+2005-12-02,24.546350,24.654833,24.477318,24.536489,20.470432,5779500
+2005-12-05,24.477318,24.714005,24.467455,24.664694,20.577393,9768400
+2005-12-06,24.802761,25.226824,24.704142,25.167652,20.997000,10449400
+2005-12-07,25.147928,25.345167,24.980276,25.315582,21.120420,8546600
+2005-12-08,24.980276,25.157791,24.625246,24.704142,20.829390,7988400
+2005-12-09,24.733728,24.901381,24.684418,24.842209,20.945799,5620600
+2005-12-12,24.940828,25.098619,24.802761,24.802761,20.912539,4492000
+2005-12-13,24.753452,24.930967,24.694281,24.783037,20.895912,6896800
+2005-12-14,24.704142,24.783037,24.497042,24.654833,20.787811,8766800
+2005-12-15,24.654833,24.743589,24.368835,24.398422,20.571617,8150900
+2005-12-16,24.556213,24.566074,24.201183,24.358974,20.538359,16282000
+2005-12-19,23.984221,24.506903,23.984221,24.201183,20.405321,6918800
+2005-12-20,24.181459,24.339251,23.885601,23.934912,20.180811,9197000
+2005-12-21,23.934912,24.043392,23.619329,23.767258,20.039454,7957100
+2005-12-22,23.786982,23.875740,23.629190,23.856016,20.114290,6690200
+2005-12-23,23.619329,24.043392,23.619329,23.934912,20.180811,3356200
+2005-12-27,24.003944,24.063116,23.767258,23.806705,20.072708,4647700
+2005-12-28,23.777121,23.964497,23.777121,23.796844,20.064398,4597500
+2005-12-29,23.846153,23.944773,23.767258,23.836292,20.097654,4938800
+2005-12-30,23.816568,23.856016,23.619329,23.639053,19.931351,7338300
+2006-01-03,23.747534,24.142012,23.668638,24.063116,20.288900,9792600
+2006-01-04,24.161736,24.349112,23.441814,23.658777,19.947985,13341800
+2006-01-05,23.796844,24.349112,23.767258,24.072979,20.297220,8818900
+2006-01-06,24.309664,24.447731,24.102564,24.398422,20.571617,6502200
+2006-01-09,24.506903,24.684418,24.378698,24.654833,20.787811,6682500
+2006-01-10,24.674557,24.980276,24.654833,24.970415,21.053896,11961300
+2006-01-11,25.029585,25.433926,24.901381,25.207100,21.253456,12367200
+2006-01-12,25.295858,25.433926,25.167652,25.285995,21.319979,10035500
+2006-01-13,25.256411,25.384615,25.147928,25.345167,21.369875,8186600
+2006-01-17,25.473373,25.473373,25.029585,25.029585,21.103786,9457300
+2006-01-18,24.881657,25.000000,24.644970,24.852072,20.954117,8041500
+2006-01-19,24.566074,25.936884,24.566074,25.877712,21.818890,26908400
+2006-01-20,25.739645,26.134123,25.246548,25.364891,21.386497,19245800
+2006-01-23,25.394478,25.443787,24.901381,25.167652,21.220198,11469800
+2006-01-24,25.246548,25.769230,25.246548,25.631165,21.611008,22475500
+2006-01-25,25.828402,25.838264,25.000000,25.088757,21.153675,33518100
+2006-01-26,25.147928,25.226824,24.556213,24.733728,20.854338,32967500
+2006-01-27,24.812624,24.930967,24.644970,24.733728,20.854338,11893100
+2006-01-30,24.871796,25.226824,24.832348,25.108480,21.170307,20840800
+2006-01-31,25.147928,25.157791,24.881657,24.960552,21.045580,16215800
+2006-02-01,24.802761,24.990139,24.802761,24.921104,21.012320,12140500
+2006-02-02,24.802761,24.822485,24.654833,24.753452,20.870966,10785800
+2006-02-03,24.635109,24.763313,24.556213,24.664694,20.796124,12191600
+2006-02-06,24.852072,25.000000,24.615385,24.615385,20.754549,12518200
+2006-02-07,25.216963,26.508875,25.147928,26.331362,22.201382,48697700
+2006-02-08,26.291914,26.666666,26.232742,26.528599,22.367685,24850600
+2006-02-09,26.489151,26.489151,26.203156,26.351086,22.218014,14056500
+2006-02-10,26.242603,26.390533,26.104536,26.311638,22.184752,10368700
+2006-02-13,26.311638,26.390533,25.887573,26.045364,21.960245,9762200
+2006-02-14,26.193295,26.380671,26.124260,26.380671,22.242956,12134200
+2006-02-15,26.380671,26.538462,26.252466,26.508875,22.351057,12263400
+2006-02-16,26.528599,26.725838,26.429979,26.676529,22.492413,11120700
+2006-02-17,26.676529,26.814596,26.459566,26.538462,22.375999,8215200
+2006-02-21,26.143984,26.903353,26.143984,26.785009,22.583879,10794700
+2006-02-22,26.854044,27.169624,26.775148,27.080868,22.833330,11110800
+2006-02-23,27.080868,27.396450,26.992111,27.327415,23.041210,13019700
+2006-02-24,27.327415,27.603550,27.228796,27.593689,23.265717,11978200
+2006-02-27,27.613413,28.096647,27.593689,27.988165,23.598328,14875500
+2006-02-28,28.067060,28.067060,27.435898,27.603550,23.274036,11010400
+2006-03-01,27.613413,27.790928,27.544378,27.702169,23.357191,9223300
+2006-03-02,27.613413,27.682446,27.357002,27.642998,23.307291,9713500
+2006-03-03,27.554241,28.047337,27.524654,27.830376,23.465282,7982600
+2006-03-06,27.761341,27.830376,27.495070,27.495070,23.182566,7482000
+2006-03-07,27.426035,27.672585,27.366863,27.623274,23.290663,7848800
+2006-03-08,27.603550,27.761341,27.524654,27.662722,23.323925,7362300
+2006-03-09,27.741617,27.879684,27.445759,27.702169,23.357191,7103500
+2006-03-10,27.652861,27.909269,27.554241,27.771204,23.415392,8196800
+2006-03-13,27.879684,28.224852,27.771204,28.165680,23.747993,10555000
+2006-03-14,28.067060,28.264299,28.027613,28.264299,23.831142,5686000
+2006-03-15,28.205128,28.451677,28.155819,28.353058,23.905983,6737900
+2006-03-16,28.412230,28.451677,28.116371,28.284023,23.847776,6098600
+2006-03-17,28.392506,28.402367,28.145956,28.244576,23.814516,8088700
+2006-03-20,28.284023,28.412230,28.126232,28.244576,23.814516,6356400
+2006-03-21,28.175543,28.234715,27.652861,27.712032,23.365501,7790000
+2006-03-22,27.721893,27.810652,27.366863,27.406311,23.107735,13212800
+2006-03-23,27.514793,27.514793,27.031559,27.140039,22.883224,12656400
+2006-03-24,27.110455,27.297831,26.972387,27.071007,22.825020,7013700
+2006-03-27,27.021696,27.159763,26.923077,26.962524,22.733551,8444800
+2006-03-28,27.031559,27.061144,26.686390,26.715977,22.525669,13011300
+2006-03-29,26.844181,27.731756,26.785009,27.475346,23.165936,13035300
+2006-03-30,27.297831,27.702169,27.268244,27.485207,23.174255,7447700
+2006-03-31,27.465483,27.662722,27.307692,27.504930,23.190882,8016400
+2006-04-03,27.485207,27.682446,27.426035,27.554241,23.232460,7126200
+2006-04-04,27.475346,27.859961,27.396450,27.524654,23.207512,7562000
+2006-04-05,27.485207,27.554241,27.248520,27.406311,23.107735,7694200
+2006-04-06,27.297831,27.445759,27.001972,27.297831,23.016264,6582800
+2006-04-07,27.366863,27.613413,26.992111,27.149900,22.891539,6069800
+2006-04-10,27.169624,27.613413,27.169624,27.406311,23.107735,6751900
+2006-04-11,27.426035,27.662722,27.238659,27.386587,23.091101,5958900
+2006-04-12,27.386587,27.613413,27.347139,27.534517,23.215830,4981300
+2006-04-13,27.435898,27.613413,27.386587,27.495070,23.182566,3668100
+2006-04-17,27.514793,27.514793,27.169624,27.268244,22.991318,5927700
+2006-04-18,27.357002,27.800789,27.317554,27.702169,23.357191,7357700
+2006-04-19,27.702169,27.712032,27.090731,27.100592,22.849962,7733100
+2006-04-20,27.080868,27.159763,26.913216,26.962524,22.733551,8014500
+2006-04-21,27.159763,27.159763,26.538462,26.646942,22.467466,7740100
+2006-04-24,26.439842,26.962524,26.380671,26.893492,22.675348,6908900
+2006-04-25,27.051283,27.159763,26.499014,26.627218,22.450834,8160100
+2006-04-26,26.627218,27.061144,26.607494,26.824457,22.617138,6622500
+2006-04-27,26.627218,27.465483,26.627218,27.376726,23.082787,9715800
+2006-04-28,27.327415,27.761341,27.268244,27.573965,23.249088,11732600
+2006-05-01,27.623274,27.790928,27.248520,27.287968,23.007950,8391000
+2006-05-02,27.514793,27.642998,27.396450,27.504930,23.190882,6901300
+2006-05-03,27.564102,27.771204,27.504930,27.721893,23.373816,8562000
+2006-05-04,27.771204,28.037476,27.771204,28.007889,23.614954,7912900
+2006-05-05,28.106508,28.747534,28.106508,28.688362,24.188700,52752500
+2006-05-08,28.895464,28.915188,28.214991,28.372782,23.922617,27970800
+2006-05-09,28.451677,29.191322,28.382643,29.171598,24.596140,18457800
+2006-05-10,29.388559,29.773176,29.171598,29.694281,25.036842,21407000
+2006-05-11,29.437870,29.526627,29.112427,29.388559,24.779072,14670700
+2006-05-12,29.388559,29.566074,29.201183,29.487179,24.862223,12971400
+2006-05-15,29.339251,29.585798,29.309664,29.575937,24.937063,11151500
+2006-05-16,29.556213,30.029585,29.457594,29.930967,25.236404,14401000
+2006-05-17,29.861933,29.881657,29.240631,29.349112,24.745808,15457400
+2006-05-18,29.349112,29.516766,28.777121,29.191322,24.612770,13828400
+2006-05-19,29.289940,29.980276,29.220907,29.733728,25.070103,14115700
+2006-05-22,29.684418,29.901381,29.437870,29.585798,24.945377,12141400
+2006-05-23,29.684418,30.029585,29.536489,29.812624,25.136621,10525600
+2006-05-24,29.891520,29.930967,29.566074,29.723866,25.061785,12655700
+2006-05-25,29.881657,29.911243,29.595661,29.733728,25.070103,8435500
+2006-05-26,29.911243,30.108480,29.832348,30.088757,25.369448,7597800
+2006-05-30,30.039448,30.049309,29.743589,29.783037,25.111677,8311500
+2006-05-31,29.930967,30.088757,29.714005,30.078896,25.361130,14487600
+2006-06-01,30.078896,30.295858,30.000000,30.197239,25.460913,10215500
+2006-06-02,30.187376,30.266272,30.029585,30.197239,25.460913,8782800
+2006-06-05,30.197239,30.562130,30.029585,30.315582,25.560692,10161300
+2006-06-06,30.463511,30.552269,29.960552,30.108480,25.386076,16529600
+2006-06-07,30.108480,30.601578,30.000000,30.226824,25.485859,13684600
+2006-06-08,30.029585,30.059172,29.289940,29.457594,24.837275,20908200
+2006-06-09,29.506903,29.585798,28.846153,28.925049,24.388262,15255700
+2006-06-12,28.007889,28.609467,27.909269,28.500986,24.030710,19017400
+2006-06-13,28.126232,28.333334,27.830376,27.889545,23.515173,13740300
+2006-06-14,27.613413,28.402367,27.564102,28.293886,23.856096,15080300
+2006-06-15,28.244576,28.846153,28.106508,28.786982,24.271851,11964000
+2006-06-16,28.984221,29.132151,28.648914,29.043392,24.488043,12726000
+2006-06-19,29.023668,29.043392,28.599606,28.668638,24.172068,12423000
+2006-06-20,28.619329,28.934912,28.510849,28.826429,24.305109,13815800
+2006-06-21,28.767258,29.339251,28.639053,28.727810,24.221958,11730400
+2006-06-22,28.727810,29.102564,28.668638,28.875740,24.346684,9553700
+2006-06-23,28.796844,28.875740,28.461538,28.717949,24.213646,8752700
+2006-06-26,28.668638,29.339251,28.658777,29.260355,24.670973,9531300
+2006-06-27,29.171598,29.240631,28.915188,29.003944,24.454781,9018900
+2006-06-28,29.003944,29.230770,28.836292,28.974360,24.429838,8400800
+2006-06-29,29.171598,29.654833,29.082840,29.467455,24.845589,11244300
+2006-06-30,29.467455,29.753452,29.408283,29.585798,24.945377,12564400
+2006-07-03,29.773176,29.891520,29.546350,29.605522,24.962004,5877000
+2006-07-05,29.408283,30.029585,29.289940,29.654833,25.003580,11345300
+2006-07-06,29.644970,30.029585,29.526627,29.625246,24.978638,10921300
+2006-07-07,29.487179,29.664694,29.260355,29.418146,24.804016,9480200
+2006-07-10,29.733728,29.921104,29.437870,29.506903,24.878853,11881900
+2006-07-11,29.487179,29.852072,29.418146,29.832348,25.153252,10152300
+2006-07-12,29.832348,29.881657,29.398422,29.497042,24.870539,8532800
+2006-07-13,28.895464,28.925049,28.057199,28.303747,23.864414,17890000
+2006-07-14,28.155819,28.303747,27.761341,28.096647,23.689789,10688400
+2006-07-17,27.771204,28.205128,27.761341,28.205128,23.781258,10591700
+2006-07-18,28.076923,28.737673,27.928993,28.698225,24.197010,10978300
+2006-07-19,28.639053,29.132151,28.550297,28.856016,24.330055,11715700
+2006-07-20,28.836292,29.339251,28.717949,28.717949,24.213646,9350700
+2006-07-21,28.510849,28.589743,28.037476,28.076923,23.673161,11011800
+2006-07-24,28.165680,29.003944,28.096647,28.964497,24.421522,9343100
+2006-07-25,28.796844,29.211046,28.560158,29.072979,24.512987,8083700
+2006-07-26,29.043392,29.240631,28.816568,28.934912,24.396578,8284200
+2006-07-27,29.230770,29.230770,28.767258,28.806705,24.288477,10633300
+2006-07-28,28.974360,29.378698,28.856016,29.289940,24.695917,14858400
+2006-07-31,29.142012,29.319527,28.994083,29.280079,24.687607,9418800
+2006-08-01,29.240631,29.339251,28.915188,28.944773,24.404890,9995800
+2006-08-02,28.925049,29.506903,28.905325,29.388559,24.779072,8594800
+2006-08-03,29.102564,29.684418,28.984221,29.625246,24.978638,8256000
+2006-08-04,29.664694,29.704142,29.191322,29.487179,24.862223,10046200
+2006-08-07,29.487179,29.487179,28.777121,28.984221,24.438154,10648000
+2006-08-08,29.092703,29.280079,28.372782,28.579882,24.097229,12585300
+2006-08-09,29.585798,29.635109,28.392506,28.431953,23.972504,18948700
+2006-08-10,28.353058,29.319527,28.293886,29.171598,24.596140,17264900
+2006-08-11,29.211046,29.270218,28.816568,29.023668,24.471411,6917600
+2006-08-14,29.122288,29.388559,29.033531,29.112427,24.546249,8796500
+2006-08-15,29.240631,29.349112,29.072979,29.230770,24.646029,7613400
+2006-08-16,29.299803,29.349112,29.151875,29.299803,24.704237,9917300
+2006-08-17,29.191322,29.585798,29.151875,29.487179,24.862223,6192700
+2006-08-18,29.556213,29.556213,29.230770,29.497042,24.870539,6823600
+2006-08-21,29.487179,29.556213,29.270218,29.556213,24.920427,6812000
+2006-08-22,29.556213,29.635109,29.102564,29.230770,24.646029,14950500
+2006-08-23,29.211046,29.388559,28.609467,28.786982,24.271851,9198000
+2006-08-24,28.767258,28.905325,28.422091,28.550297,24.072287,10492900
+2006-08-25,28.402367,28.668638,28.303747,28.550297,24.072287,5945800
+2006-08-28,28.530573,29.043392,28.520710,28.964497,24.421522,5186800
+2006-08-29,28.994083,29.122288,28.599606,28.708086,24.205328,9101500
+2006-08-30,28.826429,29.201183,28.826429,29.161736,24.587820,7930400
+2006-08-31,29.092703,29.299803,28.905325,29.240631,24.654345,5314000
+2006-09-01,29.408283,29.585798,29.240631,29.477318,24.853910,6012800
+2006-09-05,29.408283,29.625246,29.339251,29.437870,24.820648,5986000
+2006-09-06,29.309664,29.358974,29.161736,29.201183,24.621082,4959200
+2006-09-07,29.013807,29.280079,28.895464,29.082840,24.521301,6721800
+2006-09-08,29.635109,29.635109,29.092703,29.171598,24.596140,7526600
+2006-09-11,28.994083,29.418146,28.994083,29.358974,24.754126,6634300
+2006-09-12,29.339251,29.861933,29.250494,29.792900,25.119995,12786300
+2006-09-13,29.832348,30.088757,29.585798,30.029585,25.319555,10923900
+2006-09-14,29.852072,29.990139,29.635109,29.970415,25.269667,5864900
+2006-09-15,30.078896,30.157791,29.704142,29.891520,25.203142,17195700
+2006-09-18,29.704142,29.911243,29.575937,29.644970,24.995266,6677000
+2006-09-19,29.704142,30.187376,29.644970,30.049309,25.336178,7704700
+2006-09-20,30.187376,30.621302,30.177515,30.601578,25.801830,11417100
+2006-09-21,30.473373,30.700197,30.088757,30.285995,25.535749,9318500
+2006-09-22,30.147928,30.276134,29.585798,29.664694,25.011892,11383600
+2006-09-25,29.871796,29.980276,29.329388,29.743589,25.078417,8472500
+2006-09-26,29.852072,30.670612,29.842209,30.641026,25.835093,13987400
+2006-09-27,30.483234,31.025640,30.473373,30.611441,25.810150,13155400
+2006-09-28,30.571993,30.798817,30.394478,30.502958,25.718679,9665200
+2006-09-29,30.522682,30.867849,30.404339,30.483234,25.702049,8331000
+2006-10-02,30.364891,30.591717,29.980276,30.108480,25.386076,6695500
+2006-10-03,30.059172,30.621302,30.059172,30.571993,25.776886,8017500
+2006-10-04,30.424063,30.838264,30.374754,30.759369,25.934875,10073300
+2006-10-05,30.631165,30.729782,30.512821,30.641026,25.835093,5073500
+2006-10-06,30.641026,30.956608,30.433926,30.907297,26.059603,7451900
+2006-10-09,30.867849,31.153847,30.798817,30.956608,26.101175,5297300
+2006-10-10,30.867849,30.956608,30.631165,30.818541,25.984764,6376400
+2006-10-11,30.769230,30.966469,30.719921,30.877712,26.034655,4520700
+2006-10-12,30.917160,30.996056,30.690336,30.867849,26.026340,5930400
+2006-10-13,30.749506,30.808678,30.355030,30.680473,25.868355,5466100
+2006-10-16,30.621302,30.710060,30.483234,30.591717,25.793516,4523700
+2006-10-17,30.591717,30.788954,30.542406,30.670612,25.860041,6752900
+2006-10-18,30.798817,31.153847,30.769230,31.143984,26.259161,13001400
+2006-10-19,31.005917,31.311638,30.966469,31.153847,26.267481,6279900
+2006-10-20,31.311638,31.351086,30.936884,31.015779,26.151066,8248100
+2006-10-23,30.769230,30.897436,30.631165,30.848125,26.009710,8411100
+2006-10-24,30.710060,31.005917,30.601578,30.946745,26.092861,6999400
+2006-10-25,31.025640,31.114399,30.571993,30.769230,25.943192,7591300
+2006-10-26,30.917160,31.538462,30.867849,31.538462,26.591770,10394000
+2006-10-27,31.400394,31.548323,31.143984,31.291914,26.383890,6839100
+2006-10-30,31.252466,31.469427,30.966469,31.420118,26.491991,4644000
+2006-10-31,31.390533,31.439842,30.897436,31.025640,26.159384,7540700
+2006-11-01,31.114399,31.420118,31.025640,31.291914,26.383890,8551700
+2006-11-02,31.262327,31.370810,30.956608,31.262327,26.358948,5227400
+2006-11-03,31.390533,31.528599,31.252466,31.449703,26.516928,8649200
+2006-11-06,31.558186,32.031559,31.499014,32.011833,26.990892,11385300
+2006-11-07,32.041420,32.564102,31.972387,32.396450,27.315187,11927400
+2006-11-08,32.248520,32.781067,32.021698,32.642998,27.523067,7123500
+2006-11-09,32.928993,33.382645,32.544380,33.116371,27.922192,19943400
+2006-11-10,32.297829,32.396450,31.301775,31.952663,26.941010,25376700
+2006-11-13,31.854044,31.903353,31.262327,31.814596,26.824593,11942400
+2006-11-14,32.021698,32.149902,31.469427,32.080868,27.049101,12774400
+2006-11-15,31.982248,32.317554,31.893492,32.238659,27.182140,10724500
+2006-11-16,32.485207,32.642998,32.238659,32.603550,27.489803,8681600
+2006-11-17,32.603550,32.751480,32.435898,32.485207,27.390022,10254300
+2006-11-20,32.544380,32.662724,32.416172,32.662724,27.539698,11205800
+2006-11-21,32.593689,32.909271,32.544380,32.771202,27.631155,5805200
+2006-11-22,32.771202,32.820515,32.317554,32.534515,27.431595,8424200
+2006-11-24,32.287968,32.603550,32.189350,32.455620,27.365076,2680300
+2006-11-27,32.396450,32.435898,32.011833,32.140041,27.098993,10127600
+2006-11-28,32.011833,32.524654,31.982248,32.435898,27.348446,10557300
+2006-11-29,32.485207,32.544380,32.327415,32.426037,27.340134,9576400
+2006-11-30,32.534515,32.781067,32.357002,32.593689,27.481491,9079600
+2006-12-01,32.593689,32.672585,32.307693,32.633137,27.514750,8492000
+2006-12-04,32.761341,33.037476,32.633137,32.978302,27.805775,7918900
+2006-12-05,32.978302,33.727810,32.919132,33.727810,28.437723,13718900
+2006-12-06,33.560158,33.717949,33.402367,33.629192,28.354576,8221300
+2006-12-07,33.579880,33.875740,33.530571,33.668640,28.387838,6592600
+2006-12-08,33.540436,34.013805,33.540436,33.915188,28.595715,6309500
+2006-12-11,33.816566,34.161736,33.777119,34.043392,28.703814,7160800
+2006-12-12,33.964497,34.201183,33.806705,34.063118,28.720444,9487600
+2006-12-13,33.964497,34.082840,33.885601,33.974358,28.905029,8575400
+2006-12-14,34.023670,34.260357,33.905327,34.240631,29.131573,8708000
+2006-12-15,34.339249,34.358974,33.678501,33.826431,28.779177,14123400
+2006-12-18,33.629192,34.211044,33.629192,34.013805,28.938593,7995800
+2006-12-19,33.974358,34.408283,33.856014,34.368835,29.240646,8793500
+2006-12-20,34.280079,34.319527,34.043392,34.132149,29.039276,5743800
+2006-12-21,34.250492,34.319527,33.954636,34.063118,28.980543,6379400
+2006-12-22,34.132149,34.132149,33.668640,33.688362,28.661703,6810000
+2006-12-26,33.648914,34.151875,33.471401,34.053253,28.972153,6655900
+2006-12-27,34.013805,34.220909,33.846153,34.063118,28.980543,7629400
+2006-12-28,34.063118,34.329388,33.994083,34.043392,28.963766,7206900
+2006-12-29,33.944775,34.132149,33.796844,33.796844,28.754002,9830900
+2007-01-03,33.737671,34.063118,33.520710,33.727810,28.695265,13566800
+2007-01-04,33.727810,34.072979,33.698223,33.994083,28.921812,9809300
+2007-01-05,33.796844,33.964497,33.520710,33.717949,28.686880,10554700
+2007-01-08,33.717949,34.151875,33.599606,34.023670,28.946981,9482600
+2007-01-09,34.023670,34.211044,33.481262,33.974358,28.905029,11592000
+2007-01-10,33.678501,34.072979,33.510849,34.013805,28.938593,7307700
+2007-01-11,34.112427,34.546352,33.974358,34.506905,29.358114,12365200
+2007-01-12,34.506905,34.802761,34.368835,34.723866,29.542707,12026000
+2007-01-16,34.526627,34.664696,34.408283,34.546352,29.391674,11151300
+2007-01-17,34.516766,34.812622,34.418144,34.763313,29.576267,9949200
+2007-01-18,34.812622,35.433926,34.763313,35.355030,30.079683,16729000
+2007-01-19,35.433926,35.473373,34.990139,35.009861,29.786026,12625100
+2007-01-22,34.960552,34.990139,34.566074,34.664696,29.492363,8844400
+2007-01-23,34.536488,35.147930,34.516766,35.088757,29.853151,11282300
+2007-01-24,35.088757,35.256409,34.783039,34.861935,29.660166,10765000
+2007-01-25,34.861935,34.930965,34.398422,34.457592,29.316156,9587600
+2007-01-26,34.516766,34.585800,33.994083,34.072979,28.988934,10536800
+2007-01-29,34.023670,34.585800,33.964497,34.201183,29.098013,11193800
+2007-01-30,34.161736,34.605522,34.142014,34.556213,29.400063,6641800
+2007-01-31,34.654831,34.773174,34.526627,34.684418,29.509142,9061700
+2007-02-01,34.605522,34.664696,34.408283,34.526627,29.374887,8160700
+2007-02-02,34.536488,34.891518,34.516766,34.694279,29.517529,7775600
+2007-02-05,34.911243,35.000000,34.664696,34.773174,29.584652,8995000
+2007-02-06,34.921104,34.970413,34.595661,34.704144,29.525921,7909400
+2007-02-07,34.861935,35.098618,34.714005,34.990139,29.769239,12163000
+2007-02-08,35.542408,35.591717,34.467457,34.802761,29.609827,20742800
+2007-02-09,34.714005,34.812622,33.451675,33.885601,28.829525,19252200
+2007-02-12,34.053253,34.181461,33.303749,33.451675,28.460340,18021100
+2007-02-13,33.599606,34.142014,33.461540,34.112427,29.022493,13027800
+2007-02-14,34.102566,34.339249,34.043392,34.270218,29.156744,12000100
+2007-02-15,34.220909,34.250492,33.698223,34.191322,29.089621,11202100
+2007-02-16,34.201183,34.428009,33.934910,34.408283,29.274208,9637800
+2007-02-20,34.339249,34.644970,34.191322,34.566074,29.408451,8820900
+2007-02-21,34.349113,34.753452,34.270218,34.654831,29.483969,8349300
+2007-02-22,34.585800,34.753452,34.477318,34.674557,29.500757,7764800
+2007-02-23,34.615383,34.723866,34.497040,34.654831,29.483969,7245900
+2007-02-26,34.654831,34.802761,34.447731,34.625248,29.458801,8216600
+2007-02-27,34.270218,34.516766,32.199211,32.642998,27.772318,15158900
+2007-02-28,33.037476,33.905327,32.869823,33.777119,28.737217,15824200
+2007-03-01,33.264301,34.023670,33.067062,33.915188,28.854692,13920200
+2007-03-02,33.639053,33.767258,33.392506,33.481262,28.485512,10059500
+2007-03-05,33.086784,33.639053,32.958580,33.254436,28.292530,10664100
+2007-03-06,33.382645,34.072979,33.284023,33.895462,28.837902,11529900
+2007-03-07,33.915188,34.142014,33.777119,33.954636,28.888254,9523700
+2007-03-08,34.142014,34.349113,33.925049,34.211044,29.106398,8294600
+2007-03-09,34.388561,34.418144,33.806705,33.895462,28.837902,11562700
+2007-03-12,33.708088,33.895462,33.451675,33.727810,28.695265,11829600
+2007-03-13,33.520710,33.648914,33.047337,33.155819,28.208628,13897900
+2007-03-14,33.984222,33.984222,32.820515,33.264301,28.300917,10330700
+2007-03-15,33.491123,33.619328,33.274162,33.323471,28.351263,10051100
+2007-03-16,33.431953,33.629192,33.037476,33.145958,28.200235,13821200
+2007-03-19,33.402367,33.648914,33.343197,33.619328,28.602974,7911700
+2007-03-20,33.579880,33.954636,33.500988,33.905327,28.846300,7287800
+2007-03-21,33.836292,34.763313,33.786983,34.684418,29.509142,11941200
+2007-03-22,33.974358,34.930965,33.974358,34.842209,29.643389,11952600
+2007-03-23,34.575935,34.822487,34.388561,34.506905,29.358114,7273200
+2007-03-26,34.408283,34.763313,34.319527,34.733727,29.551088,9951700
+2007-03-27,34.516766,34.625248,34.161736,34.181461,29.081226,12374000
+2007-03-28,34.112427,34.122288,33.579880,33.757397,28.720444,12583500
+2007-03-29,34.211044,34.230770,33.767258,33.915188,28.854692,7859700
+2007-03-30,33.944775,34.270218,33.570019,33.954636,28.888254,10666100
+2007-04-02,33.925049,34.033531,33.767258,33.994083,28.921812,7048100
+2007-04-03,34.092701,34.684418,34.043392,34.437870,29.299381,12228200
+2007-04-04,34.358974,34.388561,33.964497,34.161736,29.064449,10021600
+2007-04-05,34.142014,34.526627,34.072979,34.428009,29.290989,7687400
+2007-04-09,34.428009,34.526627,34.319527,34.428009,29.290989,8178800
+2007-04-10,34.487179,34.546352,34.280079,34.506905,29.358114,7595900
+2007-04-11,34.457592,34.595661,33.865879,34.112427,29.022493,12003800
+2007-04-12,34.122288,34.250492,33.954636,34.191322,29.089621,9245000
+2007-04-13,34.191322,34.388561,34.033531,34.240631,29.131573,9093000
+2007-04-16,34.319527,34.654831,34.289940,34.536488,29.383287,8112900
+2007-04-17,34.447731,34.753452,34.230770,34.664696,29.492363,8996100
+2007-04-18,34.566074,34.615383,34.270218,34.437870,29.299381,7425400
+2007-04-19,34.467457,34.536488,34.092701,34.437870,29.299381,8200100
+2007-04-20,34.704144,34.832348,34.230770,34.792900,29.601437,12250200
+2007-04-23,34.714005,34.812622,34.546352,34.595661,29.433628,6983700
+2007-04-24,34.615383,34.753452,34.408283,34.428009,29.290989,6941100
+2007-04-25,34.585800,34.940830,34.378696,34.832348,29.635002,9226700
+2007-04-26,34.684418,34.980278,34.487179,34.684418,29.509142,7541300
+2007-04-27,34.516766,34.644970,34.309666,34.497040,29.349720,6212200
+2007-04-30,34.536488,34.654831,34.220909,34.497040,29.349720,10172700
+2007-05-01,34.605522,35.009861,34.605522,34.980278,29.760853,11099000
+2007-05-02,34.960552,35.502960,34.911243,35.207100,29.953833,12032100
+2007-05-03,35.236687,35.512821,35.128204,35.197239,29.945436,8327400
+2007-05-04,35.256409,35.660751,35.216961,35.355030,30.079683,7431200
+2007-05-07,35.483234,35.798817,35.424065,35.562130,30.255888,8706200
+2007-05-08,35.670612,36.065090,35.522682,36.045364,30.667015,15509600
+2007-05-09,35.552269,35.828403,35.207100,35.621304,30.306232,14113200
+2007-05-10,35.345169,35.493095,35.059170,35.197239,29.945436,9790600
+2007-05-11,35.246548,35.670612,35.236687,35.571991,30.264280,7583700
+2007-05-14,35.571991,35.749508,35.384617,35.483234,30.188766,6721100
+2007-05-15,35.631165,35.838264,35.394478,35.443787,30.155199,7872100
+2007-05-16,35.631165,35.680473,35.285995,35.512821,30.213940,8478900
+2007-05-17,35.463512,35.729782,35.295856,35.493095,30.197155,6851600
+2007-05-18,35.611439,35.690334,35.335304,35.522682,30.222326,10991100
+2007-05-21,35.502960,36.252464,35.483234,35.936882,30.574724,12152700
+2007-05-22,36.193295,36.193295,35.601578,35.769230,30.432087,7249400
+2007-05-23,35.769230,36.282051,35.749508,35.976330,30.608282,7886400
+2007-05-24,36.055225,36.065090,35.305721,35.345169,30.071299,8847800
+2007-05-25,35.414200,35.650887,35.147930,35.532543,30.230719,6401800
+2007-05-29,35.552269,35.641026,34.980278,35.098618,29.861538,11592700
+2007-05-30,34.871796,35.483234,34.783039,35.414200,30.130033,12285500
+2007-05-31,35.404339,35.453648,34.783039,34.950691,29.735685,15810200
+2007-06-01,34.921104,35.128204,34.644970,34.753452,29.567881,12748000
+2007-06-04,35.088757,35.285995,34.684418,35.207100,29.953833,12625200
+2007-06-05,35.266273,35.394478,34.723866,34.783039,29.593046,9960800
+2007-06-06,34.773174,34.871796,34.270218,34.428009,29.290989,9048700
+2007-06-07,34.289940,34.428009,33.481262,33.786983,28.745615,13400100
+2007-06-08,33.540436,33.806705,33.323471,33.727810,28.695265,11572000
+2007-06-11,33.806705,33.964497,33.648914,33.708088,28.678492,7649000
+2007-06-12,33.629192,33.708088,33.057198,33.096645,28.158281,9729800
+2007-06-13,33.500000,33.549999,33.000000,33.400002,28.825726,16055000
+2007-06-14,33.410000,33.970001,33.400002,33.790001,29.162310,14256200
+2007-06-15,34.099998,34.750000,34.000000,34.400002,29.688768,22308600
+2007-06-18,34.500000,34.889999,34.480000,34.540001,29.809589,13016200
+2007-06-19,34.509998,34.860001,34.439999,34.759998,29.999462,11318200
+2007-06-20,34.759998,34.759998,34.160000,34.189999,29.507523,11218300
+2007-06-21,34.130001,34.360001,33.880001,34.189999,29.507523,10323800
+2007-06-22,34.009998,34.439999,33.910000,34.139999,29.464375,15008200
+2007-06-25,34.110001,34.520000,33.919998,34.060001,29.395334,10094000
+2007-06-26,34.200001,34.290001,33.860001,34.060001,29.395334,9643600
+2007-06-27,33.799999,34.340000,33.730000,34.270000,29.576569,8017900
+2007-06-28,34.250000,34.250000,33.720001,33.849998,29.214088,11961500
+2007-06-29,33.889999,34.459999,33.709999,34.139999,29.464375,12019300
+2007-07-02,34.380001,34.720001,34.200001,34.520000,29.792326,7763800
+2007-07-03,34.540001,34.669998,34.299999,34.540001,29.809589,5158900
+2007-07-05,34.500000,34.639999,34.240002,34.630001,29.887274,7330400
+2007-07-06,34.590000,34.610001,34.430000,34.490002,29.766438,6726000
+2007-07-09,34.540001,34.610001,34.380001,34.459999,29.740549,6021500
+2007-07-10,34.169998,34.590000,33.849998,33.889999,29.248611,11101000
+2007-07-11,33.759998,34.009998,33.669998,33.980000,29.326286,9836700
+2007-07-12,34.110001,34.270000,33.959999,34.250000,29.559313,10820300
+2007-07-13,34.099998,34.570000,34.000000,34.369999,29.662868,8205500
+2007-07-16,34.209999,34.580002,34.209999,34.470001,29.749180,5896500
+2007-07-17,34.439999,34.869999,34.349998,34.680000,29.930418,11371400
+2007-07-18,34.560001,34.779999,34.189999,34.599998,29.861374,10280800
+2007-07-19,34.660000,34.830002,34.400002,34.470001,29.749180,8068900
+2007-07-20,34.459999,34.560001,34.279999,34.340000,29.636976,10748100
+2007-07-23,34.450001,35.230000,34.430000,35.029999,30.232483,11264900
+2007-07-24,34.950001,35.380001,34.669998,34.750000,29.990835,10447200
+2007-07-25,34.910000,35.189999,34.750000,34.930000,30.146187,10207300
+2007-07-26,34.759998,34.900002,33.740002,34.080002,29.412592,15441300
+2007-07-27,34.139999,34.380001,33.740002,33.740002,29.119152,12203200
+2007-07-30,33.980000,34.150002,33.570000,34.009998,29.352175,11459800
+2007-07-31,34.009998,34.070000,32.990002,33.000000,28.480499,15670600
+2007-08-01,32.880001,33.939999,32.750000,33.830002,29.196831,15048300
+2007-08-02,33.980000,34.529999,33.689999,34.360001,29.654247,11475200
+2007-08-03,34.139999,34.619999,33.840000,33.900002,29.257248,12642500
+2007-08-06,34.000000,34.389999,33.830002,34.299999,29.602461,14263800
+2007-08-07,33.950001,34.849998,33.849998,34.549999,29.818222,14309900
+2007-08-08,34.599998,34.930000,33.849998,34.290001,29.593832,13078200
+2007-08-09,33.830002,34.200001,33.000000,33.000000,28.480499,14932200
+2007-08-10,32.570000,33.419998,32.070000,33.160000,28.618589,15404100
+2007-08-13,33.340000,33.740002,33.130001,33.240002,28.687630,8596100
+2007-08-14,33.240002,33.500000,32.130001,32.360001,27.928148,11502700
+2007-08-15,32.299999,32.639999,31.629999,31.709999,27.367172,12764000
+2007-08-16,31.350000,32.630001,31.250000,32.580002,28.118027,19361900
+2007-08-17,33.380001,33.549999,32.299999,32.680000,28.204330,15964200
+2007-08-20,32.779999,33.110001,32.520000,32.910000,28.402826,10782500
+2007-08-21,32.709999,33.450001,32.639999,33.290001,28.730785,9959400
+2007-08-22,33.580002,33.680000,33.110001,33.570000,28.972437,10689800
+2007-08-23,33.750000,33.980000,32.970001,33.160000,28.618589,10962700
+2007-08-24,33.270000,33.889999,33.119999,33.869999,29.231348,7380600
+2007-08-27,33.740002,34.080002,33.490002,33.840000,29.205458,7100200
+2007-08-28,33.750000,33.790001,33.070000,33.200001,28.653109,9712800
+2007-08-29,33.299999,33.950001,33.070000,33.889999,29.248611,8361500
+2007-08-30,33.570000,33.740002,33.330002,33.470001,28.886135,7330700
+2007-08-31,33.740002,33.820000,33.450001,33.599998,28.998333,7075900
+2007-09-04,33.509998,34.299999,33.500000,34.130001,29.455746,9743200
+2007-09-05,33.840000,34.099998,33.740002,34.040001,29.378065,10641700
+2007-09-06,34.220001,34.480000,33.869999,34.410000,29.697388,8150700
+2007-09-07,33.930000,34.160000,33.470001,33.590000,28.989700,9909900
+2007-09-10,33.590000,33.869999,33.230000,33.560001,28.963814,5500100
+2007-09-11,33.630001,33.700001,33.290001,33.490002,28.903397,7714800
+2007-09-12,33.529999,33.820000,33.410000,33.720001,29.101894,8113000
+2007-09-13,33.799999,33.900002,33.439999,33.520000,28.929287,6968700
+2007-09-14,33.320000,33.849998,33.000000,33.560001,28.963814,8797900
+2007-09-17,33.360001,33.450001,33.060001,33.380001,28.808460,7205200
+2007-09-18,33.540001,34.700001,33.400002,34.580002,29.844116,13653100
+2007-09-19,34.810001,34.950001,34.540001,34.560001,29.826855,10549400
+2007-09-20,34.669998,34.750000,33.799999,34.040001,29.378065,10201900
+2007-09-21,34.060001,34.889999,34.040001,34.599998,29.861374,14708900
+2007-09-24,34.599998,34.599998,34.200001,34.320000,29.619722,8089600
+2007-09-25,34.200001,34.320000,33.680000,34.070000,29.403959,9942600
+2007-09-26,34.270000,34.580002,34.189999,34.520000,29.792326,8417300
+2007-09-27,34.480000,34.490002,34.119999,34.209999,29.524786,6974400
+2007-09-28,34.259998,34.419998,33.930000,34.389999,29.680138,8016200
+2007-10-01,34.380001,34.910000,34.380001,34.650002,29.904528,8662100
+2007-10-02,34.650002,35.009998,34.599998,34.770000,30.008095,9403000
+2007-10-03,34.599998,34.970001,34.540001,34.720001,29.964941,5716800
+2007-10-04,34.889999,35.070000,34.830002,34.900002,30.120291,5580700
+2007-10-05,35.090000,35.599998,35.029999,35.470001,30.612225,7324700
+2007-10-08,35.470001,35.689999,35.130001,35.270000,30.439615,4791700
+2007-10-09,35.160000,35.450001,35.099998,35.450001,30.594965,7273700
+2007-10-10,35.450001,35.549999,35.060001,35.279999,30.448240,4715700
+2007-10-11,35.500000,35.630001,34.900002,35.009998,30.215218,6109800
+2007-10-12,35.200001,35.580002,35.150002,35.470001,30.612225,5474100
+2007-10-15,35.369999,35.490002,34.860001,35.139999,30.327417,6688100
+2007-10-16,35.130001,35.340000,34.939999,35.009998,30.215218,8072600
+2007-10-17,35.259998,35.590000,35.139999,35.380001,30.534557,9032300
+2007-10-18,35.310001,35.400002,34.709999,34.779999,30.016727,8503300
+2007-10-19,34.549999,34.770000,33.770000,33.810001,29.179569,11195000
+2007-10-22,33.680000,34.790001,33.570000,34.680000,29.930418,9859200
+2007-10-23,34.820000,34.959999,34.580002,34.889999,30.111662,6380800
+2007-10-24,34.549999,35.110001,34.360001,35.060001,30.258379,7848700
+2007-10-25,34.980000,35.000000,34.139999,34.450001,29.731920,10633300
+2007-10-26,34.619999,34.720001,33.939999,34.380001,29.671503,11462400
+2007-10-29,34.490002,34.810001,34.200001,34.680000,29.930418,7164000
+2007-10-30,34.480000,34.570000,34.250000,34.290001,29.593832,4885800
+2007-10-31,34.009998,34.730000,34.009998,34.630001,29.887274,7818200
+2007-11-01,34.509998,34.709999,33.320000,33.799999,29.170938,11121700
+2007-11-02,34.259998,34.259998,33.520000,33.919998,29.274504,8945500
+2007-11-05,33.400002,34.259998,33.400002,33.880001,29.239981,7983900
+2007-11-06,33.860001,34.180000,33.619999,34.080002,29.412592,6594500
+2007-11-07,33.529999,34.029999,33.310001,33.500000,28.912022,7564100
+2007-11-08,33.439999,33.830002,32.889999,33.630001,29.024227,12136700
+2007-11-09,32.810001,33.200001,32.060001,32.740002,28.256113,14932200
+2007-11-12,32.000000,32.720001,32.000000,32.020000,27.634718,13316400
+2007-11-13,32.220001,33.070000,32.060001,33.009998,28.489126,11447600
+2007-11-14,33.139999,33.139999,31.750000,31.860001,27.496626,15313300
+2007-11-15,31.799999,32.840000,31.750000,32.400002,27.962679,10539500
+2007-11-16,32.590000,32.700001,32.009998,32.529999,28.074871,9347700
+2007-11-19,32.259998,32.299999,31.150000,31.250000,26.970173,14612100
+2007-11-20,31.469999,31.889999,31.020000,31.549999,27.229084,9425700
+2007-11-21,31.350000,31.700001,31.120001,31.500000,27.185938,8661400
+2007-11-23,31.650000,31.900000,31.490000,31.840000,27.479366,3548500
+2007-11-26,31.910000,31.959999,31.200001,31.240000,26.961538,7337600
+2007-11-27,31.379999,31.820000,30.680000,31.719999,27.375805,14270700
+2007-11-28,31.889999,32.959999,31.670000,32.689999,28.212959,12798400
+2007-11-29,32.650002,32.910000,32.380001,32.810001,28.316523,7761400
+2007-11-30,33.490002,33.490002,32.930000,33.150002,28.609959,11958400
+2007-12-03,33.060001,33.500000,32.830002,33.040001,28.515022,9049100
+2007-12-04,32.910000,33.070000,32.439999,32.750000,28.264742,9322200
+2007-12-05,32.500000,32.990002,32.259998,32.750000,28.570070,7463500
+2007-12-06,32.740002,32.889999,32.250000,32.720001,28.543900,7927400
+2007-12-07,32.720001,33.000000,32.660000,32.790001,28.604963,8023900
+2007-12-10,32.820000,32.970001,32.270000,32.349998,28.221121,12255800
+2007-12-11,32.349998,32.509998,31.709999,31.760000,27.706425,13925600
+2007-12-12,32.270000,32.700001,31.969999,32.279999,28.160053,10617700
+2007-12-13,32.080002,32.889999,32.029999,32.759998,28.578789,11704100
+2007-12-14,32.900002,33.230000,32.549999,33.009998,28.796881,11888200
+2007-12-17,32.869999,33.250000,32.689999,32.990002,28.779438,12023200
+2007-12-18,32.910000,33.299999,32.610001,33.020000,28.805609,11937200
+2007-12-19,32.959999,33.139999,32.080002,32.259998,28.142607,11035600
+2007-12-20,32.430000,32.500000,32.130001,32.330002,28.203671,9591800
+2007-12-21,32.650002,33.049999,32.250000,32.939999,28.735815,20408900
+2007-12-24,32.959999,33.410000,32.950001,33.240002,28.997532,3458100
+2007-12-26,33.029999,33.200001,32.730000,32.820000,28.631134,5029400
+2007-12-27,32.639999,32.900002,32.340000,32.430000,28.290911,6700400
+2007-12-28,32.770000,32.799999,32.270000,32.419998,28.282183,5621800
+2007-12-31,32.419998,32.520000,32.240002,32.279999,28.160053,5447900
+2008-01-02,32.320000,32.630001,31.690001,31.840000,27.776215,9269900
+2008-01-03,31.879999,32.020000,31.660000,31.770000,27.715149,9681100
+2008-01-04,31.000000,31.680000,31.000000,31.129999,27.156830,9550700
+2008-01-07,31.379999,31.490000,30.910000,31.160000,27.183001,10742900
+2008-01-08,31.250000,31.500000,30.459999,30.540001,26.642136,13014300
+2008-01-09,30.570000,30.590000,29.200001,30.160000,26.310631,24858700
+2008-01-10,29.900000,30.940001,29.900000,30.670000,26.755541,12240900
+2008-01-11,30.450001,30.879999,30.209999,30.320000,26.450211,11671000
+2008-01-14,30.500000,30.750000,30.120001,30.350000,26.476381,8717400
+2008-01-15,30.219999,30.280001,29.580000,29.850000,26.040201,12043800
+2008-01-16,29.660000,30.270000,29.330000,29.830000,26.022753,18819600
+2008-01-17,29.900000,30.040001,28.690001,28.770000,25.098043,20109200
+2008-01-18,29.000000,29.309999,28.250000,28.510000,24.871225,16355300
+2008-01-22,27.180000,28.480000,26.299999,28.120001,24.531000,20538100
+2008-01-23,27.100000,28.540001,27.090000,28.520000,24.879950,22041300
+2008-01-24,28.580000,29.309999,28.350000,29.230000,25.499329,14847200
+2008-01-25,29.530001,29.860001,28.600000,28.680000,25.019531,11966600
+2008-01-28,28.750000,29.469999,28.500000,29.370001,25.621466,10421800
+2008-01-29,28.730000,29.200001,28.120001,28.799999,25.124214,23011500
+2008-01-30,29.059999,29.980000,29.059999,29.410000,25.656357,16609200
+2008-01-31,29.040001,30.170000,28.840000,29.840000,26.031477,15074200
+2008-02-01,30.750000,30.799999,30.180000,30.660000,26.746819,13988600
+2008-02-04,31.049999,31.049999,30.620001,30.900000,26.956188,11835300
+2008-02-05,30.990000,30.990000,30.049999,30.070000,26.232121,19217700
+2008-02-06,31.900000,32.180000,31.100000,31.500000,27.479605,31176300
+2008-02-07,31.260000,31.920000,31.110001,31.719999,27.671528,16612800
+2008-02-08,31.610001,32.299999,31.610001,32.119999,28.020475,15161400
+2008-02-11,32.270000,32.270000,31.520000,31.930000,27.854725,9082000
+2008-02-12,32.060001,32.439999,31.770000,32.029999,27.941961,14314100
+2008-02-13,32.070000,32.959999,32.070000,32.779999,28.596239,14445500
+2008-02-14,32.720001,32.849998,32.160000,32.320000,28.194948,11997100
+2008-02-15,32.160000,32.700001,32.029999,32.490002,28.343254,15225700
+2008-02-19,32.529999,32.779999,32.119999,32.189999,28.081539,10609500
+2008-02-20,31.969999,32.669998,31.889999,32.580002,28.421766,10207100
+2008-02-21,32.630001,32.889999,32.200001,32.270000,28.151331,10305200
+2008-02-22,32.250000,32.639999,31.799999,32.570000,28.413040,8812400
+2008-02-25,32.820000,33.090000,32.490002,32.889999,28.692198,13567800
+2008-02-26,32.750000,33.209999,32.720001,32.889999,28.692198,10340600
+2008-02-27,32.759998,33.230000,32.610001,33.099998,28.875395,10319600
+2008-02-28,32.930000,33.189999,32.439999,32.540001,28.386869,11221100
+2008-02-29,32.130001,32.849998,32.130001,32.410000,28.273464,14575900
+2008-03-03,32.610001,32.709999,32.110001,32.410000,28.273464,8283100
+2008-03-04,32.160000,32.250000,31.379999,31.870001,27.802385,15907700
+2008-03-05,31.900000,32.150002,31.520000,31.639999,27.601738,9692200
+2008-03-06,31.549999,31.740000,31.190001,31.250000,27.261513,10529000
+2008-03-07,31.110001,31.250000,30.559999,30.760000,26.834055,12750400
+2008-03-10,30.820000,30.820000,30.280001,30.459999,26.572342,12622400
+2008-03-11,31.000000,31.209999,30.610001,31.190001,27.209173,14520900
+2008-03-12,31.260000,31.820000,31.200001,31.410000,27.401093,9646200
+2008-03-13,31.209999,31.580000,30.820000,31.330000,27.331305,10556200
+2008-03-14,31.330000,31.360001,30.430000,30.780001,26.851501,11421400
+2008-03-17,30.139999,30.850000,30.049999,30.459999,26.572342,12626300
+2008-03-18,30.770000,31.730000,30.770000,31.719999,27.671528,13788400
+2008-03-19,31.690001,31.900000,31.230000,31.240000,27.252790,11737100
+2008-03-20,31.280001,31.980000,31.250000,31.900000,27.828550,13802300
+2008-03-24,32.000000,32.160000,31.799999,32.040001,27.950686,10536000
+2008-03-25,31.990000,32.240002,31.840000,32.080002,27.985586,8547300
+2008-03-26,31.910000,32.040001,31.530001,31.760000,27.706425,9219700
+2008-03-27,31.879999,31.959999,31.309999,31.379999,27.374924,9569900
+2008-03-28,31.590000,31.780001,31.000000,31.000000,27.043425,11021000
+2008-03-31,31.040001,31.480000,30.990000,31.379999,27.374924,11352000
+2008-04-01,31.520000,32.020000,31.520000,31.910000,27.837280,15649900
+2008-04-02,32.000000,32.389999,31.459999,31.559999,27.531950,13072500
+2008-04-03,31.430000,31.700001,31.150000,31.530001,27.505779,8981100
+2008-04-04,31.520000,31.620001,31.059999,31.240000,27.252790,12519000
+2008-04-07,31.410000,31.730000,31.209999,31.520000,27.497053,9732500
+2008-04-08,31.360001,31.400000,30.950001,31.070000,27.104486,13068800
+2008-04-09,31.120001,31.209999,30.610001,30.850000,26.912569,10508200
+2008-04-10,30.790001,31.559999,30.719999,31.350000,27.348753,12183000
+2008-04-11,31.040001,31.490000,30.150000,30.180000,26.328081,15488300
+2008-04-14,30.230000,30.379999,29.830000,30.040001,26.205952,9476300
+2008-04-15,30.070000,30.100000,29.570000,29.930000,26.109991,10320500
+2008-04-16,30.120001,30.379999,29.820000,30.320000,26.450211,10606400
+2008-04-17,30.240000,30.889999,30.240000,30.780001,26.851501,13438900
+2008-04-18,30.850000,31.549999,30.850000,31.330000,27.331305,11513500
+2008-04-21,31.100000,31.500000,31.049999,31.459999,27.444708,8797300
+2008-04-22,31.389999,31.420000,31.170000,31.290001,27.296410,8461400
+2008-04-23,31.469999,31.680000,31.110001,31.639999,27.601738,15000000
+2008-04-24,31.850000,32.200001,31.690001,31.940001,27.863451,12009500
+2008-04-25,32.040001,32.450001,31.950001,32.360001,28.229847,11949900
+2008-04-28,32.549999,32.610001,32.209999,32.430000,28.290911,7648600
+2008-04-29,32.320000,32.880001,32.209999,32.759998,28.578789,10179700
+2008-04-30,32.779999,33.029999,32.330002,32.430000,28.290911,13077000
+2008-05-01,32.450001,33.419998,32.419998,33.299999,29.049871,12246700
+2008-05-02,33.500000,33.799999,33.299999,33.490002,29.215626,8708000
+2008-05-05,33.720001,33.720001,33.119999,33.290001,29.041143,10193300
+2008-05-06,33.439999,33.779999,32.790001,33.730000,29.424990,15123000
+2008-05-07,34.209999,34.950001,34.049999,34.700001,30.271187,30876600
+2008-05-08,34.759998,34.799999,34.330002,34.560001,30.149059,18859600
+2008-05-09,34.099998,34.470001,34.029999,34.299999,29.922235,13434900
+2008-05-12,34.150002,34.799999,34.040001,34.770000,30.332260,9176400
+2008-05-13,34.580002,34.779999,34.220001,34.330002,29.948412,11067300
+2008-05-14,34.500000,34.980000,34.330002,34.799999,30.358421,9867400
+2008-05-15,34.650002,35.000000,34.520000,34.990002,30.524172,11357100
+2008-05-16,34.970001,35.020000,34.470001,34.910000,30.454388,12728900
+2008-05-19,34.790001,34.939999,34.470001,34.720001,30.288637,12365200
+2008-05-20,34.500000,34.599998,33.980000,34.090000,29.739040,12635300
+2008-05-21,33.950001,34.090000,33.500000,33.660000,29.363922,11358100
+2008-05-22,33.779999,33.779999,33.349998,33.610001,29.320307,7735300
+2008-05-23,33.520000,33.599998,33.180000,33.230000,28.988804,8951300
+2008-05-27,33.310001,33.740002,33.259998,33.639999,29.346478,9309100
+2008-05-28,33.660000,34.150002,33.660000,33.860001,29.538399,14201800
+2008-05-29,33.860001,34.349998,33.740002,33.810001,29.494778,12570800
+2008-05-30,33.980000,34.049999,33.480000,33.599998,29.311583,12779000
+2008-06-02,33.500000,33.590000,33.049999,33.299999,29.049871,10323300
+2008-06-03,33.480000,33.509998,33.000000,33.180000,28.945185,11060000
+2008-06-04,33.150002,34.590000,33.150002,34.349998,29.965857,24630200
+2008-06-05,34.320000,34.709999,34.099998,34.490002,30.087994,11640300
+2008-06-06,34.139999,34.299999,32.970001,33.009998,28.796881,16597800
+2008-06-09,33.320000,33.340000,32.840000,33.180000,28.945185,10011900
+2008-06-10,32.889999,34.000000,32.889999,33.830002,29.512226,16606900
+2008-06-11,33.750000,33.939999,33.250000,33.270000,29.023697,12419500
+2008-06-12,33.529999,33.700001,33.119999,33.240002,28.997532,13138800
+2008-06-13,33.480000,33.950001,33.349998,33.930000,29.599464,9153300
+2008-06-16,33.770000,33.900002,33.389999,33.650002,29.355202,10955100
+2008-06-17,33.849998,33.860001,33.099998,33.110001,28.884119,9163100
+2008-06-18,32.970001,33.349998,32.770000,32.950001,28.744545,12078500
+2008-06-19,32.959999,33.119999,32.570000,32.889999,28.692198,13913000
+2008-06-20,32.639999,32.730000,31.940001,31.940001,27.863451,21345000
+2008-06-23,32.130001,32.439999,32.040001,32.389999,28.256014,10850800
+2008-06-24,32.250000,32.500000,31.959999,32.220001,28.107714,17528500
+2008-06-25,32.400002,32.840000,32.299999,32.490002,28.343254,16132100
+2008-06-26,32.160000,32.290001,31.520000,31.530001,27.505779,19848000
+2008-06-27,31.530001,31.920000,31.430000,31.570000,27.540672,20844300
+2008-06-30,31.700001,32.150002,31.139999,31.200001,27.217897,15850700
+2008-07-01,30.910000,31.080000,30.400000,31.049999,27.087038,17144900
+2008-07-02,31.110001,31.250000,30.700001,30.709999,26.790438,13633000
+2008-07-03,30.920000,31.200001,30.740000,30.900000,26.956188,6952000
+2008-07-07,30.000000,30.620001,29.740000,30.080000,26.240843,21977600
+2008-07-08,30.000000,30.000000,29.309999,29.950001,26.127436,26083200
+2008-07-09,29.959999,29.980000,29.530001,29.540001,25.769766,13575400
+2008-07-10,29.590000,29.780001,29.250000,29.600000,25.822109,11366700
+2008-07-11,29.260000,29.570000,28.799999,29.200001,25.473160,14948400
+2008-07-14,29.590000,29.870001,29.070000,29.100000,25.385925,14277500
+2008-07-15,28.820000,29.780001,28.549999,29.430000,25.673803,16665800
+2008-07-16,29.500000,30.600000,29.049999,30.430000,26.546173,18261000
+2008-07-17,30.500000,31.230000,30.120001,31.200001,27.217897,17998800
+2008-07-18,31.290001,31.290001,30.660000,30.900000,26.956188,14004500
+2008-07-21,30.959999,31.000000,30.379999,30.490000,26.598513,9782000
+2008-07-22,30.420000,30.549999,29.969999,30.410000,26.528725,21715300
+2008-07-23,30.620001,31.190001,30.400000,31.150000,27.174276,13220100
+2008-07-24,31.139999,31.480000,30.700001,31.020000,27.060869,11780800
+2008-07-25,31.219999,31.520000,31.000000,31.100000,27.130659,12842800
+2008-07-28,31.330000,31.330000,30.459999,30.500000,26.607241,13658900
+2008-07-29,30.600000,31.120001,30.410000,30.920000,26.973633,13298000
+2008-07-30,31.240000,31.770000,30.889999,31.670000,27.627913,18219700
+2008-07-31,30.549999,30.870001,30.129999,30.350000,26.476381,25577400
+2008-08-01,30.500000,30.700001,29.830000,30.080000,26.240843,15120700
+2008-08-04,30.110001,30.590000,30.049999,30.370001,26.493832,11445500
+2008-08-05,30.520000,31.400000,30.400000,31.309999,27.313854,15540000
+2008-08-06,31.129999,31.620001,30.840000,31.420000,27.409817,12841000
+2008-08-07,31.120001,31.270000,30.920000,30.920000,26.973633,10696400
+2008-08-08,31.000000,32.119999,30.950001,32.029999,27.941961,14759800
+2008-08-11,31.969999,33.419998,31.840000,32.799999,28.613684,21907300
+2008-08-12,32.860001,32.919998,32.060001,32.160000,28.055370,14848700
+2008-08-13,32.119999,32.200001,31.520000,31.740000,27.688976,11051300
+2008-08-14,31.670000,32.619999,31.450001,32.330002,28.203671,11535200
+2008-08-15,32.470001,32.919998,32.340000,32.500000,28.351976,11487700
+2008-08-18,32.540001,32.599998,31.809999,32.080002,27.985586,10246000
+2008-08-19,31.840000,32.060001,31.580000,31.750000,27.697699,8781900
+2008-08-20,31.860001,32.200001,31.400000,31.820000,27.758766,12415000
+2008-08-21,31.650000,32.000000,31.320000,31.830000,27.767488,6966700
+2008-08-22,32.119999,32.380001,31.879999,32.200001,28.090267,7954900
+2008-08-25,31.969999,32.090000,31.400000,31.600000,27.566843,8071300
+2008-08-26,31.520000,31.870001,31.440001,31.709999,27.662804,6506600
+2008-08-27,31.459999,32.099998,31.410000,31.760000,27.706425,7317600
+2008-08-28,31.889999,32.669998,31.770000,32.590000,28.430487,9675200
+2008-08-29,32.369999,32.750000,32.310001,32.349998,28.221121,9062800
+2008-09-02,32.740002,33.369999,32.299999,32.439999,28.299633,11843700
+2008-09-03,32.320000,32.540001,31.969999,32.509998,28.360697,9554000
+2008-09-04,32.439999,32.439999,31.490000,31.540001,27.514503,11444300
+2008-09-05,31.450001,31.549999,30.870001,31.360001,27.357473,9012200
+2008-09-08,31.990000,33.049999,31.920000,32.919998,28.718370,20362900
+2008-09-09,32.869999,33.299999,32.430000,32.430000,28.290911,15312400
+2008-09-10,32.599998,33.230000,32.480000,32.770000,28.587513,13883700
+2008-09-11,32.590000,33.000000,32.200001,32.939999,28.735815,14011700
+2008-09-12,32.770000,33.450001,32.599998,33.259998,29.014977,11213500
+2008-09-15,32.590000,33.290001,32.250000,32.360001,28.229847,15191900
+2008-09-16,31.740000,32.889999,31.500000,32.509998,28.360697,18550900
+2008-09-17,32.099998,33.000000,31.910000,32.189999,28.081539,22284700
+2008-09-18,32.250000,33.740002,32.060001,33.439999,29.172001,22442900
+2008-09-19,34.009998,34.849998,33.250000,34.389999,30.000753,28430600
+2008-09-22,33.849998,34.049999,32.840000,32.910000,28.709648,18394300
+2008-09-23,32.880001,33.320000,32.150002,32.529999,28.378145,13450900
+2008-09-24,32.590000,32.590000,31.629999,31.770000,27.715149,13600300
+2008-09-25,32.000000,32.880001,31.969999,32.470001,28.325806,13356400
+2008-09-26,31.940001,32.810001,31.940001,32.750000,28.570070,9546400
+2008-09-29,32.200001,32.950001,29.250000,29.730000,25.935516,26696900
+2008-09-30,30.340000,30.850000,29.480000,30.690001,26.772987,16589100
+2008-10-01,30.299999,30.799999,29.910000,30.680000,26.764267,11020900
+2008-10-02,30.420000,30.809999,29.900000,29.959999,26.136160,11783300
+2008-10-03,30.170000,31.059999,29.459999,29.540001,25.769766,17468400
+2008-10-06,28.840000,29.030001,26.299999,28.260000,24.653135,35803300
+2008-10-07,27.809999,28.350000,26.209999,26.570000,23.178835,27859500
+2008-10-08,25.510000,27.120001,24.930000,25.680000,22.402426,28972100
+2008-10-09,25.620001,26.290001,23.750000,23.799999,20.762371,24391800
+2008-10-10,22.590000,24.420000,21.250000,23.040001,20.099371,37801900
+2008-10-13,23.709999,26.840000,23.500000,26.719999,23.309687,20772800
+2008-10-14,27.959999,28.000000,25.040001,25.680000,22.402426,20214300
+2008-10-15,25.110001,25.230000,23.280001,23.370001,20.387255,18089100
+2008-10-16,23.370001,24.600000,22.070000,24.270000,21.172386,21418200
+2008-10-17,23.690001,25.850000,23.440001,24.750000,21.591118,17985900
+2008-10-20,24.959999,26.549999,24.590000,26.549999,23.161381,14070800
+2008-10-21,26.100000,26.180000,25.090000,25.309999,22.079649,14680800
+2008-10-22,24.680000,24.680000,22.320000,23.049999,20.108091,20567900
+2008-10-23,22.940001,23.639999,21.889999,23.400000,20.413422,17445800
+2008-10-24,21.740000,23.200001,21.500000,22.610001,19.724253,16247600
+2008-10-27,22.020000,22.549999,21.360001,21.370001,18.642513,15081300
+2008-10-28,21.990000,24.230000,21.520000,23.910000,20.858330,21845500
+2008-10-29,23.750000,25.400000,23.500000,23.969999,20.910675,17071300
+2008-10-30,24.920000,25.549999,24.170000,25.330000,22.097094,14182600
+2008-10-31,25.020000,26.469999,24.940001,25.910000,22.603065,17466700
+2008-11-03,25.850000,25.850000,24.190001,25.040001,21.844110,19742300
+2008-11-04,25.290001,26.240000,25.290001,26.020000,22.699030,16761900
+2008-11-05,25.910000,25.969999,24.100000,24.230000,21.137487,15063300
+2008-11-06,24.129999,24.270000,22.320000,22.809999,19.898722,23027700
+2008-11-07,21.549999,24.200001,21.410000,23.360001,20.378531,28705700
+2008-11-10,23.900000,23.900000,21.660000,22.080000,19.261892,16363000
+2008-11-11,21.770000,22.410000,20.490000,21.780001,19.000187,23171800
+2008-11-12,21.360001,21.709999,20.030001,20.160000,17.586946,21258600
+2008-11-13,20.299999,21.750000,19.580000,21.650000,18.886776,24066200
+2008-11-14,21.139999,22.389999,20.940001,21.080000,18.389528,16888600
+2008-11-17,20.080000,20.740000,19.580000,19.740000,17.220556,20652300
+2008-11-18,19.920000,20.980000,19.860001,20.670000,18.031857,21954000
+2008-11-19,20.559999,21.030001,19.930000,19.940001,17.395029,22433000
+2008-11-20,19.590000,20.150000,18.600000,18.730000,16.339458,26921200
+2008-11-21,19.240000,21.129999,18.780001,21.120001,18.424421,28131800
+2008-11-24,21.360001,22.910000,20.600000,22.200001,19.366579,24042800
+2008-11-25,22.299999,22.680000,21.299999,22.030001,19.218277,20331500
+2008-11-26,21.299999,22.770000,20.940001,22.500000,19.628294,13904000
+2008-11-28,22.420000,22.540001,21.820000,22.520000,19.645737,7577100
+2008-12-01,22.040001,22.129999,20.270000,20.330000,17.735252,17969200
+2008-12-02,20.650000,21.549999,20.299999,21.459999,18.721027,20051200
+2008-12-03,20.600000,22.000000,20.570000,21.940001,19.139763,16331800
+2008-12-04,21.540001,22.559999,21.459999,21.790001,19.008913,17198700
+2008-12-05,21.510000,22.840000,20.870001,22.770000,19.863832,17945000
+2008-12-08,23.000000,26.100000,22.910000,24.920000,21.739420,34087700
+2008-12-09,24.080000,24.879999,23.320000,23.530001,20.526833,26341900
+2008-12-10,23.850000,24.200001,23.299999,23.700001,20.675137,16276200
+2008-12-11,23.410000,23.660000,22.580000,22.820000,20.205847,14827300
+2008-12-12,22.120001,22.860001,21.930000,22.610001,20.019907,15025300
+2008-12-15,22.930000,22.980000,22.340000,22.770000,20.161577,12908800
+2008-12-16,23.040001,24.170000,22.570000,23.780001,21.055874,22276500
+2008-12-17,23.570000,23.940001,23.139999,23.580000,20.878786,14715400
+2008-12-18,23.670000,23.900000,22.500000,22.830000,20.214705,18351500
+2008-12-19,23.000000,23.200001,22.180000,22.430000,19.860523,21133800
+2008-12-22,22.410000,22.530001,21.280001,21.930000,19.417801,12596900
+2008-12-23,22.180000,22.500000,21.760000,21.850000,19.346970,9381100
+2008-12-24,21.910000,22.209999,21.790001,21.990000,19.470928,3507200
+2008-12-26,22.290001,22.420000,21.959999,22.180000,19.639164,3069100
+2008-12-29,22.150000,22.150000,21.150000,21.469999,19.010494,7411700
+2008-12-30,21.480000,22.500000,21.379999,22.480000,19.904793,11105200
+2008-12-31,22.570000,22.950001,22.520000,22.690001,20.090738,9012100
+2009-01-02,22.760000,24.030001,22.500000,23.920000,21.179834,9796600
+2009-01-05,23.510000,24.000000,23.290001,23.500000,20.807949,11675100
+2009-01-06,23.809999,24.830000,23.799999,24.309999,21.525162,12070800
+2009-01-07,23.889999,23.889999,22.920000,23.180000,20.524611,12300700
+2009-01-08,23.100000,23.219999,22.510000,22.900000,20.276682,12600700
+2009-01-09,23.120001,23.120001,22.230000,22.309999,19.754271,11489700
+2009-01-12,22.299999,22.480000,21.610001,21.860001,19.355820,11259800
+2009-01-13,21.520000,21.930000,21.030001,21.200001,18.771427,15502200
+2009-01-14,20.930000,21.200001,20.420000,20.799999,18.417250,15482600
+2009-01-15,20.799999,21.610001,19.950001,21.360001,18.913099,18169700
+2009-01-16,21.670000,22.139999,20.780001,21.459999,19.001642,16294800
+2009-01-20,21.459999,21.660000,20.150000,20.240000,17.921398,19725000
+2009-01-21,20.840000,21.309999,20.459999,21.230000,18.797993,17181900
+2009-01-22,20.790001,21.200001,20.469999,20.969999,18.567776,16007900
+2009-01-23,20.430000,20.969999,19.980000,20.610001,18.249016,15436900
+2009-01-26,21.080000,21.420000,20.500000,20.860001,18.470375,10840400
+2009-01-27,20.910000,21.500000,20.850000,21.250000,18.815699,12696600
+2009-01-28,21.639999,22.400000,21.600000,22.280001,19.727713,13079500
+2009-01-29,21.930000,22.020000,21.120001,21.250000,18.815699,11295500
+2009-01-30,21.309999,21.650000,20.510000,20.680000,18.310997,15484400
+2009-02-02,20.080000,20.500000,19.840000,20.200001,17.885984,16499500
+2009-02-03,20.780001,20.790001,19.760000,20.620001,18.257870,27402700
+2009-02-04,19.590000,20.090000,18.709999,19.000000,16.823450,48826100
+2009-02-05,18.870001,19.299999,18.170000,18.709999,16.566669,34993800
+2009-02-06,18.760000,19.639999,18.650000,19.450001,17.221901,20471300
+2009-02-09,19.820000,19.820000,19.100000,19.440001,17.213043,15404300
+2009-02-10,19.280001,19.440001,18.639999,18.760000,16.610943,22253200
+2009-02-11,18.950001,18.980000,18.260000,18.500000,16.380730,19154300
+2009-02-12,18.330000,18.879999,18.030001,18.830000,16.672924,20367000
+2009-02-13,18.719999,18.900000,18.400000,18.520000,16.398434,11804200
+2009-02-17,18.070000,18.110001,17.719999,17.840000,15.796331,21996100
+2009-02-18,17.950001,18.010000,17.530001,17.629999,15.610388,18669100
+2009-02-19,17.780001,18.010000,17.219999,17.690001,15.663515,19575900
+2009-02-20,17.250000,17.879999,17.129999,17.530001,15.521845,22969800
+2009-02-23,17.709999,17.840000,16.920000,16.969999,15.025996,15134700
+2009-02-24,17.110001,18.150000,16.969999,17.920000,15.867169,16498400
+2009-02-25,17.780001,17.799999,17.030001,17.360001,15.371320,16146500
+2009-02-26,17.540001,17.600000,16.840000,16.860001,14.928598,12636900
+2009-02-27,16.680000,17.129999,16.420000,16.770000,14.848909,18621000
+2009-03-02,16.480000,16.549999,15.900000,16.049999,14.211387,17103200
+2009-03-03,16.160000,16.690001,15.990000,16.360001,14.485877,19551300
+2009-03-04,16.520000,17.219999,16.090000,16.940001,14.999434,15800100
+2009-03-05,16.500000,16.600000,15.850000,15.990000,14.158260,17038600
+2009-03-06,15.840000,16.420000,15.320000,15.830000,14.016590,20629300
+2009-03-09,15.480000,16.240000,15.460000,15.590000,13.804083,14569200
+2009-03-10,15.230000,16.620001,15.140000,16.610001,14.707236,24287100
+2009-03-11,16.760000,16.980000,16.379999,16.590000,14.689528,14956900
+2009-03-12,16.799999,17.490000,16.570000,17.430000,15.433299,16768100
+2009-03-13,17.639999,17.639999,17.010000,17.129999,15.167665,17520000
+2009-03-16,17.270000,17.690001,17.120001,17.209999,15.238502,18963300
+2009-03-17,17.200001,17.830000,17.160000,17.820000,15.778624,11424200
+2009-03-18,17.719999,18.370001,17.500000,18.110001,16.035404,16190600
+2009-03-19,18.150000,18.299999,17.480000,17.700001,15.672373,17370100
+2009-03-20,17.799999,17.980000,17.080000,17.450001,15.451011,17766600
+2009-03-23,17.830000,18.920000,17.740000,18.920000,16.752615,15830500
+2009-03-24,18.090000,18.700001,18.040001,18.290001,16.194784,16570800
+2009-03-25,18.340000,18.879999,17.860001,18.530001,16.407291,16019200
+2009-03-26,18.680000,19.139999,18.400000,19.059999,16.876575,15556800
+2009-03-27,18.809999,18.950001,18.400000,18.590000,16.460417,13962400
+2009-03-30,18.120001,18.260000,17.540001,17.850000,15.805190,13150500
+2009-03-31,18.059999,18.490000,17.820000,18.160000,16.079678,14704500
+2009-04-01,17.920000,18.820000,17.840000,18.790001,16.637506,12933600
+2009-04-02,19.250000,20.500000,19.150000,20.209999,17.894836,25194100
+2009-04-03,19.910000,20.389999,19.780001,20.000000,17.708895,18478000
+2009-04-06,19.780001,20.000000,19.350000,19.620001,17.372425,11280900
+2009-04-07,19.379999,19.379999,19.030001,19.120001,16.929703,11623500
+2009-04-08,19.190001,19.549999,19.000000,19.360001,17.142210,12438100
+2009-04-09,19.830000,20.070000,19.600000,19.879999,17.602640,13841800
+2009-04-13,19.600000,19.690001,19.270000,19.530001,17.292734,14907400
+2009-04-14,19.350000,19.389999,18.799999,19.020000,16.841162,14315900
+2009-04-15,19.120001,19.799999,19.040001,19.730000,17.469824,12706700
+2009-04-16,19.900000,20.639999,19.719999,20.510000,18.160469,15449300
+2009-04-17,20.570000,20.700001,20.080000,20.379999,18.045362,14911100
+2009-04-20,20.000000,20.010000,19.230000,19.410000,17.186480,14272900
+2009-04-21,19.219999,19.510000,19.150000,19.469999,17.239607,11611000
+2009-04-22,19.299999,19.740000,19.000000,19.400000,17.177629,15812000
+2009-04-23,19.420000,19.730000,19.160000,19.629999,17.381277,14190400
+2009-04-24,19.840000,20.459999,19.559999,20.260000,17.939112,16599400
+2009-04-27,19.620001,20.150000,19.600000,19.740000,17.478676,17855500
+2009-04-28,19.600000,19.790001,19.309999,19.510000,17.275023,23172500
+2009-04-29,19.680000,21.430000,19.469999,21.010000,18.603191,28270400
+2009-04-30,21.280001,22.570000,21.150000,21.900000,19.391241,36593300
+2009-05-01,21.760000,21.980000,21.120001,21.940001,19.426657,15718000
+2009-05-04,22.010000,22.900000,21.840000,22.860001,20.241268,17603700
+2009-05-05,22.690001,23.340000,22.660000,23.150000,20.498045,16806900
+2009-05-06,24.950001,26.290001,24.660000,25.870001,22.906456,49777100
+2009-05-07,25.799999,26.170000,24.980000,25.330000,22.428314,23981600
+2009-05-08,25.700001,26.000000,25.000000,25.459999,22.543421,18172900
+2009-05-11,25.110001,25.170000,24.680000,24.709999,21.879339,14656600
+2009-05-12,24.900000,24.900000,23.900000,24.320000,21.534014,15536900
+2009-05-13,23.889999,23.990000,23.430000,23.600000,20.896494,14055900
+2009-05-14,23.650000,23.969999,23.270000,23.480000,20.790241,11893400
+2009-05-15,23.450001,24.219999,23.270000,23.410000,20.728260,20991800
+2009-05-18,23.620001,24.260000,23.549999,24.209999,21.436617,9355300
+2009-05-19,24.250000,24.400000,23.750000,23.820000,21.091293,11906400
+2009-05-20,24.000000,24.280001,23.410000,23.549999,20.852221,10524200
+2009-05-21,23.080000,23.299999,22.930000,23.230000,20.568876,12632300
+2009-05-22,23.250000,24.000000,23.049999,23.700001,20.985041,11099100
+2009-05-26,23.469999,24.610001,23.379999,24.540001,21.728813,16299800
+2009-05-27,24.440001,24.719999,23.770000,23.870001,21.135565,14981300
+2009-05-28,24.070000,24.400000,23.490000,23.990000,21.241821,12314700
+2009-05-29,24.139999,24.250000,23.500000,24.219999,21.445473,11215600
+2009-06-01,24.830000,25.260000,24.750000,25.030001,22.162680,11990000
+2009-06-02,25.090000,25.459999,24.879999,25.129999,22.251225,9792900
+2009-06-03,24.860001,25.160000,24.750000,25.080000,22.206953,10892000
+2009-06-04,25.010000,25.219999,24.809999,25.139999,22.260082,11521800
+2009-06-05,25.480000,25.549999,24.850000,24.950001,22.091846,10980400
+2009-06-08,24.940001,25.549999,24.629999,25.330000,22.428314,10700200
+2009-06-09,25.180000,25.490000,25.080000,25.330000,22.428314,8534800
+2009-06-10,25.469999,25.520000,24.820000,25.100000,22.224663,11791200
+2009-06-11,25.150000,25.639999,25.100000,25.230000,22.339769,9577300
+2009-06-12,25.030001,25.389999,24.520000,25.059999,22.189245,7332400
+2009-06-15,24.780001,24.780001,24.040001,24.250000,21.472036,7871500
+2009-06-16,24.309999,24.430000,23.370001,23.490000,20.799097,13346200
+2009-06-17,23.430000,23.950001,23.360001,23.680000,20.967329,8615100
+2009-06-18,23.760000,23.969999,23.510000,23.719999,21.002748,9313800
+2009-06-19,23.889999,24.120001,23.379999,23.530001,20.834511,13805600
+2009-06-22,23.190001,23.330000,22.629999,22.660000,20.064173,12356600
+2009-06-23,22.719999,23.070000,22.549999,22.879999,20.258972,9802300
+2009-06-24,23.080000,23.480000,22.780001,22.900000,20.276682,10006700
+2009-06-25,22.860001,23.730000,22.780001,23.549999,20.852221,11652800
+2009-06-26,23.420000,23.559999,23.150000,23.430000,20.745970,12086600
+2009-06-29,23.500000,23.840000,23.309999,23.660000,20.949619,7735600
+2009-06-30,23.700001,23.750000,23.040001,23.330000,20.657425,10805200
+2009-07-01,23.500000,23.930000,23.389999,23.450001,20.763681,8102100
+2009-07-02,23.180000,23.450001,22.570000,22.840000,20.223557,15561200
+2009-07-06,22.680000,23.139999,22.600000,23.090000,20.444918,14452200
+2009-07-07,23.110001,23.219999,22.500000,22.530001,19.949070,15263700
+2009-07-08,22.680000,22.840000,22.049999,22.240000,19.692291,14542800
+2009-07-09,22.379999,22.639999,22.100000,22.530001,19.949070,9779600
+2009-07-10,22.290001,22.719999,22.230000,22.410000,19.842817,11076000
+2009-07-13,22.490000,22.780001,22.070000,22.700001,20.099594,8433300
+2009-07-14,22.719999,23.209999,22.660000,23.110001,20.462627,10361500
+2009-07-15,23.370001,24.129999,23.370001,24.080000,21.321510,12112800
+2009-07-16,24.540001,24.990000,24.360001,24.790001,21.950174,15463500
+2009-07-17,24.809999,24.830000,24.430000,24.510000,21.702251,10516600
+2009-07-20,24.980000,25.430000,24.770000,25.370001,22.463736,13801500
+2009-07-21,25.389999,25.540001,24.959999,25.200001,22.313208,13081600
+2009-07-22,25.090000,25.600000,24.990000,25.379999,22.472586,8490700
+2009-07-23,25.450001,26.840000,25.309999,26.799999,23.729919,17220100
+2009-07-24,26.629999,26.820000,26.170000,26.580000,23.535120,8818000
+2009-07-27,26.549999,26.680000,26.219999,26.450001,23.420013,9796800
+2009-07-28,26.570000,26.590000,26.090000,26.370001,23.349180,10791500
+2009-07-29,26.180000,26.639999,25.780001,25.889999,22.924164,11834300
+2009-07-30,26.379999,26.700001,26.190001,26.219999,23.216358,17084100
+2009-07-31,25.389999,25.639999,25.040001,25.120001,22.242373,25054700
+2009-08-03,25.520000,25.610001,25.360001,25.520000,22.596550,18161500
+2009-08-04,25.350000,25.660000,25.240000,25.639999,22.702803,10073700
+2009-08-05,25.639999,25.700001,24.990000,25.270000,22.375191,12043000
+2009-08-06,25.350000,25.620001,25.070000,25.370001,22.463736,9145200
+2009-08-07,25.629999,26.830000,25.600000,26.690001,23.632517,14579100
+2009-08-10,26.420000,26.540001,26.139999,26.440001,23.411160,8199100
+2009-08-11,26.330000,26.410000,25.770000,25.900000,22.933020,8745200
+2009-08-12,25.910000,26.469999,25.750000,26.219999,23.216358,9024000
+2009-08-13,26.280001,26.350000,25.709999,26.260000,23.251778,10621800
+2009-08-14,26.230000,26.469999,25.540001,25.860001,22.897600,8901400
+2009-08-17,25.379999,25.540001,24.889999,25.090000,22.215809,9396100
+2009-08-18,25.230000,25.280001,24.950001,25.200001,22.313208,9701200
+2009-08-19,25.000000,25.600000,25.000000,25.480000,22.561132,11145500
+2009-08-20,25.530001,25.990000,25.469999,25.889999,22.924164,10030200
+2009-08-21,26.049999,26.860001,25.870001,26.790001,23.721064,13723300
+2009-08-24,26.990000,27.120001,26.730000,26.799999,23.729919,9356400
+2009-08-25,26.980000,27.299999,26.799999,26.870001,23.791899,11427900
+2009-08-26,26.719999,27.200001,26.590000,27.000000,23.907007,10588200
+2009-08-27,27.100000,27.110001,26.510000,27.010000,23.915861,9044600
+2009-08-28,27.080000,27.080000,26.590000,26.840000,23.765331,9270100
+2009-08-31,26.490000,26.969999,25.750000,26.040001,23.056978,29465600
+2009-09-01,25.889999,26.670000,25.590000,25.680000,22.738220,26083200
+2009-09-02,25.629999,25.650000,25.260000,25.400000,22.490295,14833200
+2009-09-03,25.590000,25.629999,25.250000,25.440001,22.525715,11087600
+2009-09-04,25.450001,26.030001,25.350000,25.900000,22.933020,11638500
+2009-09-08,26.309999,26.360001,26.090000,26.280001,23.269491,12399400
+2009-09-09,26.290001,26.990000,26.150000,26.950001,23.862734,19096600
+2009-09-10,26.959999,28.370001,26.959999,28.360001,25.111214,21856400
+2009-09-11,28.350000,28.420000,28.010000,28.420000,25.164339,14221600
+2009-09-14,28.190001,28.290001,27.790001,28.080000,24.863287,12298300
+2009-09-15,28.000000,28.370001,27.830000,28.290001,25.049232,10712200
+2009-09-16,28.309999,28.459999,28.100000,28.370001,25.120066,10020300
+2009-09-17,28.309999,28.530001,28.139999,28.459999,25.199755,13941400
+2009-09-18,28.570000,28.680000,28.250000,28.440001,25.182049,14104800
+2009-09-21,28.120001,28.180000,27.660000,28.000000,24.792452,10935700
+2009-09-22,28.129999,28.400000,28.010000,28.379999,25.128922,9578900
+2009-09-23,28.490000,28.600000,28.040001,28.080000,24.863287,11057300
+2009-09-24,28.129999,28.320000,27.650000,27.969999,24.765888,11950300
+2009-09-25,28.000000,28.020000,27.530001,27.620001,24.455986,13628000
+2009-09-28,27.770000,28.379999,27.719999,28.230000,24.996103,8633100
+2009-09-29,28.260000,28.440001,27.940001,27.940001,24.739328,8057800
+2009-09-30,27.930000,28.000000,27.250000,27.459999,24.314314,18600700
+2009-10-01,27.760000,27.760000,27.000000,27.360001,24.225767,16124900
+2009-10-02,27.049999,27.270000,26.840000,27.209999,24.092953,14635300
+2009-10-05,27.160000,27.770000,27.000000,27.670000,24.500257,10263300
+2009-10-06,27.830000,28.330000,27.670000,28.180000,24.951830,11016400
+2009-10-07,28.290001,28.450001,27.959999,28.170000,24.942980,6861400
+2009-10-08,28.270000,28.990000,28.250000,28.670000,25.385698,10366700
+2009-10-09,28.670000,28.760000,28.450001,28.639999,25.359133,7091600
+2009-10-12,28.709999,29.150000,28.490000,28.639999,25.359133,6960000
+2009-10-13,28.600000,28.790001,28.360001,28.410000,25.155485,9426400
+2009-10-14,28.690001,29.000000,28.480000,28.910000,25.598206,9541600
+2009-10-15,28.730000,29.250000,28.700001,29.230000,25.881548,8734100
+2009-10-16,29.020000,29.520000,28.809999,29.400000,26.032076,12048800
+2009-10-19,29.400000,29.980000,29.200001,29.870001,26.448229,12627300
+2009-10-20,29.730000,29.730000,28.990000,29.350000,25.987803,10377800
+2009-10-21,29.330000,29.959999,29.200001,29.230000,25.881548,9611300
+2009-10-22,29.070000,29.570000,28.850000,29.440001,26.067493,7547900
+2009-10-23,29.360001,29.469999,28.690001,28.889999,25.580496,9778300
+2009-10-26,28.870001,29.469999,28.420000,28.650000,25.367990,13255900
+2009-10-27,28.629999,28.930000,28.010000,28.170000,24.942980,11176400
+2009-10-28,28.070000,28.160000,27.230000,27.240000,24.119516,14359600
+2009-10-29,27.459999,28.160000,27.180000,28.139999,24.916412,13171500
+2009-10-30,28.139999,28.309999,27.280001,27.370001,24.234625,12957000
+2009-11-02,27.379999,27.860001,27.010000,27.410000,24.270039,13301400
+2009-11-03,27.260000,27.730000,27.100000,27.620001,24.455986,12044500
+2009-11-04,28.120001,28.730000,27.900000,28.030001,24.819016,12832900
+2009-11-05,28.230000,29.100000,28.230000,29.000000,25.677896,12129800
+2009-11-06,28.790001,29.000000,28.340000,28.559999,25.288301,11862600
+2009-11-09,28.670000,29.070000,28.230000,29.000000,25.677896,12643300
+2009-11-10,29.010000,29.299999,28.950001,29.120001,25.784149,16938200
+2009-11-11,29.209999,29.469999,29.110001,29.290001,25.934675,11551800
+2009-11-12,29.400000,29.459999,28.900000,29.049999,25.722170,18282100
+2009-11-13,29.750000,30.530001,29.709999,30.440001,26.952936,29084900
+2009-11-16,30.120001,30.879999,30.030001,30.700001,27.183153,12891200
+2009-11-17,30.270000,30.930000,30.250000,30.870001,27.333677,10173600
+2009-11-18,30.680000,30.780001,30.299999,30.670000,27.156590,7843500
+2009-11-19,30.260000,30.400000,29.920000,30.209999,26.749281,8148500
+2009-11-20,30.070000,30.250000,29.850000,30.010000,26.572197,9205600
+2009-11-23,30.309999,30.760000,30.309999,30.480000,26.988352,8046300
+2009-11-24,30.459999,30.530001,30.100000,30.230000,26.766994,7829700
+2009-11-25,30.240000,30.750000,30.000000,30.610001,27.103462,10274900
+2009-11-27,30.070000,30.500000,29.799999,30.350000,26.873247,5688000
+2009-11-30,30.230000,30.370001,29.950001,30.219999,26.758139,10260500
+2009-12-01,30.440001,30.770000,30.440001,30.730000,27.209713,9318200
+2009-12-02,30.559999,30.870001,30.500000,30.790001,27.262840,11386300
+2009-12-03,30.799999,30.969999,30.299999,30.330000,26.855537,9511700
+2009-12-04,30.680000,31.100000,30.420000,30.840000,27.307117,14406400
+2009-12-07,30.930000,31.240000,30.820000,31.000000,27.448784,12451500
+2009-12-08,30.870001,30.980000,30.450001,30.680000,27.165445,8703000
+2009-12-09,30.930000,30.930000,30.520000,30.700001,27.183153,9267900
+2009-12-10,30.850000,31.530001,30.750000,31.299999,28.034023,13609600
+2009-12-11,31.490000,31.830000,31.370001,31.700001,28.392288,15481000
+2009-12-14,31.920000,32.080002,31.809999,31.830000,28.508720,9230700
+2009-12-15,31.700001,32.330002,31.610001,32.180000,28.822201,15183800
+2009-12-16,32.200001,32.590000,32.200001,32.430000,29.046114,11940200
+2009-12-17,32.169998,32.290001,31.650000,31.930000,28.598288,14592500
+2009-12-18,32.080002,32.169998,31.580000,31.950001,28.616199,15229700
+2009-12-21,32.049999,32.490002,31.990000,32.400002,29.019249,7702900
+2009-12-22,32.310001,32.500000,32.220001,32.310001,28.938639,9162000
+2009-12-23,32.310001,32.470001,32.040001,32.430000,29.046114,6567900
+2009-12-24,32.020000,32.340000,31.940001,32.290001,28.920723,3557400
+2009-12-28,32.240002,32.360001,31.799999,31.910000,28.580378,7981600
+2009-12-29,32.049999,32.480000,32.009998,32.380001,29.001333,6242100
+2009-12-30,32.310001,32.380001,32.099998,32.279999,28.911768,6692500
+2009-12-31,32.270000,32.750000,32.220001,32.250000,28.884899,19651700
+2010-01-04,32.500000,32.750000,31.870001,32.070000,28.723677,13700400
+2010-01-05,32.070000,32.160000,31.700001,31.990000,28.652029,10307700
+2010-01-06,31.900000,32.000000,31.680000,31.820000,28.499765,10709500
+2010-01-07,31.770000,31.860001,31.540001,31.830000,28.508720,8202100
+2010-01-08,31.660000,31.940001,31.530001,31.879999,28.553509,7657500
+2010-01-11,31.530001,31.740000,31.090000,31.360001,28.087765,11250500
+2010-01-12,31.020000,31.120001,30.680000,30.820000,27.604109,14618300
+2010-01-13,30.889999,31.370001,30.870001,31.290001,28.025070,9620600
+2010-01-14,31.129999,31.180000,30.959999,31.020000,27.783241,9235600
+2010-01-15,31.010000,31.150000,30.410000,30.600000,27.407066,13936400
+2010-01-19,30.590000,31.190001,30.500000,31.010000,27.774282,9662100
+2010-01-20,31.000000,31.240000,30.719999,31.190001,27.935507,13302800
+2010-01-21,31.139999,31.420000,30.570000,30.610001,27.416023,15403600
+2010-01-22,30.500000,30.709999,29.950001,29.980000,26.851757,14459400
+2010-01-25,30.219999,30.230000,29.549999,29.920000,26.798023,11155800
+2010-01-26,29.799999,29.940001,29.320000,29.660000,26.565147,11197300
+2010-01-27,29.559999,29.809999,29.010000,29.320000,26.260624,16121200
+2010-01-28,29.430000,29.480000,28.709999,29.350000,26.287498,17126000
+2010-01-29,29.370001,29.820000,29.250000,29.549999,26.466625,14507900
+2010-02-01,29.600000,29.770000,29.340000,29.520000,26.439756,10894200
+2010-02-02,29.530001,30.010000,29.219999,29.980000,26.851757,10100900
+2010-02-03,29.920000,30.870001,29.790001,30.750000,27.541412,15976200
+2010-02-04,30.280001,30.490000,29.660000,29.670000,26.574104,13407100
+2010-02-05,29.629999,29.840000,28.990000,29.540001,26.457672,11956100
+2010-02-08,29.760000,29.950001,29.459999,29.480000,26.403933,10656800
+2010-02-09,29.889999,30.040001,29.480000,29.840000,26.726368,14376800
+2010-02-10,29.160000,30.090000,29.030001,30.030001,26.896542,24191900
+2010-02-11,29.820000,30.430000,29.760000,30.160000,27.012978,12389400
+2010-02-12,29.860001,30.200001,29.860001,30.070000,26.932367,12802700
+2010-02-16,30.280001,30.500000,30.170000,30.469999,27.290630,9061600
+2010-02-17,30.540001,30.930000,30.379999,30.860001,27.639938,9768600
+2010-02-18,30.860001,31.070000,30.680000,31.020000,27.783241,8212300
+2010-02-19,30.940001,31.520000,30.850000,31.230000,27.971331,12595600
+2010-02-22,31.350000,31.469999,31.059999,31.120001,27.872808,7868200
+2010-02-23,31.030001,31.270000,30.860001,30.920000,27.693676,10228300
+2010-02-24,31.049999,31.500000,31.010000,31.389999,28.114630,12062100
+2010-02-25,30.879999,31.410000,30.850000,31.360001,28.087765,10553800
+2010-02-26,31.389999,31.410000,30.980000,31.240000,27.980284,13449800
+2010-03-01,31.400000,31.600000,31.340000,31.540001,28.248981,8681000
+2010-03-02,31.410000,31.920000,31.389999,31.879999,28.553509,14070600
+2010-03-03,31.950001,31.959999,31.549999,31.639999,28.338549,10345500
+2010-03-04,32.020000,32.860001,31.920000,32.570000,29.171509,23075400
+2010-03-05,32.790001,33.220001,32.750000,33.220001,29.753685,19204800
+2010-03-08,32.959999,33.500000,32.950001,33.189999,29.726812,13176600
+2010-03-09,33.000000,33.340000,32.779999,33.310001,29.834291,11072700
+2010-03-10,33.180000,33.599998,33.099998,33.330002,29.852209,9652900
+2010-03-11,33.320000,33.810001,33.119999,33.810001,30.282120,11392500
+2010-03-12,33.810001,33.810001,33.470001,33.689999,30.174641,9610400
+2010-03-15,33.520000,33.750000,33.360001,33.720001,30.201515,8235900
+2010-03-16,33.740002,33.830002,33.490002,33.750000,30.228388,8472600
+2010-03-17,33.790001,33.840000,33.529999,33.750000,30.228388,7858400
+2010-03-18,33.689999,33.849998,33.520000,33.779999,30.255249,8044200
+2010-03-19,33.639999,33.980000,33.480000,33.639999,30.129860,16729000
+2010-03-22,33.470001,34.189999,33.419998,33.950001,30.407511,11717600
+2010-03-23,34.000000,34.099998,33.730000,34.009998,30.461250,7038000
+2010-03-24,33.910000,34.430000,33.869999,34.389999,30.801603,12195200
+2010-03-25,34.490002,35.500000,34.419998,35.090000,31.428555,32705900
+2010-03-26,34.889999,35.599998,34.849998,35.310001,31.625605,22909800
+2010-03-29,35.279999,35.419998,34.970001,35.160000,31.491255,12560500
+2010-03-30,35.130001,35.200001,34.759998,35.099998,31.437515,17201800
+2010-03-31,35.070000,35.200001,34.810001,34.910000,31.267342,10789500
+2010-04-01,35.070000,35.590000,35.009998,35.549999,31.840559,14603000
+2010-04-05,35.279999,35.540001,35.070000,35.220001,31.544994,10607900
+2010-04-06,35.240002,35.610001,35.099998,35.470001,31.768913,12637700
+2010-04-07,35.389999,35.490002,35.060001,35.320000,31.634567,9242200
+2010-04-08,35.279999,35.750000,35.139999,35.669998,31.948044,9915900
+2010-04-09,35.820000,36.880001,35.639999,36.220001,32.440651,21346000
+2010-04-12,36.139999,36.330002,35.799999,35.889999,32.145084,10410800
+2010-04-13,35.869999,36.060001,35.709999,35.840000,32.100304,10922200
+2010-04-14,35.889999,36.209999,35.770000,36.200001,32.422741,9267400
+2010-04-15,36.080002,36.509998,35.860001,36.400002,32.601871,9185900
+2010-04-16,36.220001,36.509998,35.680000,35.830002,32.091343,12327500
+2010-04-19,35.799999,36.169998,35.580002,36.169998,32.395870,9840500
+2010-04-20,36.279999,36.650002,36.279999,36.540001,32.727257,16515000
+2010-04-21,36.389999,36.750000,36.349998,36.560001,32.745178,9039700
+2010-04-22,36.299999,36.869999,36.119999,36.779999,32.942215,10494600
+2010-04-23,36.799999,36.849998,36.490002,36.790001,32.951180,7677100
+2010-04-26,36.759998,37.139999,36.669998,37.040001,33.175083,9983200
+2010-04-27,36.779999,36.820000,36.160000,36.320000,32.530209,10931300
+2010-04-28,36.520000,36.549999,35.959999,36.290001,32.503349,10930900
+2010-04-29,36.500000,37.389999,36.500000,37.220001,33.336308,10576700
+2010-04-30,37.189999,37.490002,36.599998,36.840000,32.995953,11456400
+2010-05-03,36.950001,37.980000,36.840000,37.560001,33.640827,10887500
+2010-05-04,37.290001,37.299999,36.320000,36.590000,32.772041,13195700
+2010-05-05,36.430000,36.560001,35.080002,35.340000,31.652475,17960000
+2010-05-06,35.160000,35.549999,31.000000,34.009998,30.461250,25181500
+2010-05-07,33.860001,34.349998,32.770000,33.410000,29.923861,22579800
+2010-05-10,34.990002,35.630001,34.560001,35.290001,31.607693,18282600
+2010-05-11,35.520000,36.250000,35.139999,35.759998,32.028645,19853200
+2010-05-12,34.980000,36.209999,34.820000,35.130001,31.464384,25533300
+2010-05-13,35.180000,35.529999,34.740002,34.750000,31.124035,15445900
+2010-05-14,34.660000,34.779999,33.830002,34.060001,30.506033,16945200
+2010-05-17,34.220001,34.340000,33.290001,34.200001,30.631426,15136700
+2010-05-18,34.360001,34.639999,33.520000,33.730000,30.210466,17393800
+2010-05-19,33.470001,33.799999,33.090000,33.389999,29.905945,18536600
+2010-05-20,32.759998,32.959999,31.990000,31.990000,28.652029,26615500
+2010-05-21,31.400000,32.869999,31.400000,32.869999,29.440201,20122900
+2010-05-24,32.560001,32.990002,32.110001,32.480000,29.090900,14040200
+2010-05-25,31.580000,32.369999,31.209999,32.320000,28.947596,26555900
+2010-05-26,32.509998,33.849998,32.139999,33.070000,29.619339,25296300
+2010-05-27,33.599998,34.369999,33.480000,34.369999,30.783686,16214500
+2010-05-28,33.700001,34.049999,33.240002,33.419998,29.932816,15832700
+2010-06-01,33.060001,33.869999,32.849998,33.330002,29.852209,15438800
+2010-06-02,33.570000,34.790001,33.459999,34.740002,31.115086,18309300
+2010-06-03,34.919998,35.099998,34.310001,34.709999,31.088211,17752500
+2010-06-04,34.060001,34.349998,33.439999,33.689999,30.174641,21813100
+2010-06-07,33.700001,34.070000,33.009998,33.040001,29.592464,14577700
+2010-06-08,33.029999,33.320000,32.410000,33.150002,29.690987,15662500
+2010-06-09,33.299999,33.820000,32.830002,32.939999,29.502895,13707600
+2010-06-10,33.529999,34.230000,33.450001,34.110001,30.550819,13720200
+2010-06-11,33.849998,34.259998,33.580002,34.240002,30.667252,8799700
+2010-06-14,34.490002,34.570000,33.919998,33.930000,30.389603,9336800
+2010-06-15,34.110001,35.000000,34.110001,34.990002,31.339001,11552200
+2010-06-16,34.790001,34.930000,34.480000,34.790001,31.159864,9416200
+2010-06-17,34.799999,35.110001,34.599998,35.070000,31.410652,10951100
+2010-06-18,35.189999,35.349998,34.919998,35.150002,31.482300,14672300
+2010-06-21,35.500000,35.830002,34.860001,35.060001,31.401688,11382500
+2010-06-22,35.119999,35.459999,34.290001,34.349998,30.765776,9731000
+2010-06-23,34.360001,34.630001,34.049999,34.340000,30.756819,9474000
+2010-06-24,34.130001,34.180000,33.459999,33.599998,30.094034,10273200
+2010-06-25,33.509998,33.680000,33.119999,33.480000,29.986551,11155500
+2010-06-28,33.560001,33.759998,33.049999,33.220001,29.753685,7802000
+2010-06-29,33.310001,33.369999,32.040001,32.299999,28.929680,15868300
+2010-06-30,32.160000,32.509998,31.360001,31.500000,28.213154,17186400
+2010-07-01,31.469999,32.099998,30.910000,31.490000,28.204201,19906200
+2010-07-02,31.559999,31.680000,30.719999,31.379999,28.105675,15597300
+2010-07-06,31.770000,32.299999,31.379999,31.719999,28.410198,15215500
+2010-07-07,31.730000,33.169998,31.660000,33.139999,29.682034,14309500
+2010-07-08,33.330002,33.540001,32.900002,33.340000,29.861162,10044000
+2010-07-09,33.380001,33.799999,33.369999,33.750000,30.228388,8530900
+2010-07-12,33.580002,33.889999,33.389999,33.770000,30.246296,10094300
+2010-07-13,34.110001,34.669998,33.959999,34.450001,30.855341,8653000
+2010-07-14,34.240002,34.509998,33.919998,34.240002,30.667252,7955100
+2010-07-15,34.150002,34.419998,33.599998,34.049999,30.497082,10656300
+2010-07-16,33.889999,34.000000,32.919998,33.029999,29.583509,13060100
+2010-07-19,33.090000,33.500000,32.990002,33.310001,29.834291,9647000
+2010-07-20,33.330002,33.450001,32.799999,33.400002,29.914902,11632000
+2010-07-21,33.470001,33.599998,32.630001,32.810001,29.386467,10151100
+2010-07-22,33.090000,33.770000,33.049999,33.590000,30.085073,9984500
+2010-07-23,33.520000,34.209999,33.450001,34.130001,30.568729,10790200
+2010-07-26,34.090000,34.720001,34.090000,34.400002,30.810562,10104400
+2010-07-27,34.660000,34.660000,33.959999,34.279999,30.703081,7103000
+2010-07-28,34.189999,34.209999,33.799999,34.040001,30.488123,6089200
+2010-07-29,34.220001,34.799999,33.419998,33.709999,30.192554,9938400
+2010-07-30,33.369999,33.880001,33.020000,33.689999,30.174641,7805000
+2010-08-02,34.139999,34.680000,34.130001,34.480000,30.882210,9948800
+2010-08-03,34.439999,34.549999,34.020000,34.209999,30.640383,8503300
+2010-08-04,34.410000,34.959999,34.139999,34.830002,31.195690,9840700
+2010-08-05,34.680000,35.049999,34.560001,34.980000,31.330036,8407300
+2010-08-06,34.580002,35.090000,34.389999,35.000000,31.347950,8061700
+2010-08-09,34.959999,35.230000,34.900002,35.160000,31.491255,7809300
+2010-08-10,34.889999,35.410000,34.730000,35.290001,31.607693,12353400
+2010-08-11,34.770000,34.770000,34.130001,34.220001,30.649340,15028900
+2010-08-12,33.849998,34.299999,33.709999,33.990002,30.443342,11809600
+2010-08-13,33.790001,33.810001,33.410000,33.680000,30.165684,11058100
+2010-08-16,33.439999,33.750000,33.110001,33.660000,30.147774,6917800
+2010-08-17,33.919998,33.990002,33.509998,33.759998,30.237337,9875200
+2010-08-18,33.810001,34.220001,33.400002,33.910000,30.371687,8325000
+2010-08-19,33.720001,33.779999,33.060001,33.189999,29.726812,12070700
+2010-08-20,33.000000,33.200001,32.709999,33.049999,29.601421,8632700
+2010-08-23,33.230000,33.549999,32.799999,32.930000,29.493942,8046500
+2010-08-24,32.490002,32.700001,31.969999,32.139999,28.786377,14229700
+2010-08-25,31.870001,32.240002,31.549999,32.110001,28.759506,14144000
+2010-08-26,32.180000,32.439999,31.879999,31.940001,28.607243,10120500
+2010-08-27,32.130001,32.849998,31.580000,32.779999,29.359596,11144000
+2010-08-30,32.730000,32.990002,32.320000,32.349998,28.974463,7057900
+2010-08-31,32.200001,32.830002,31.910000,32.540001,29.144638,12520700
+2010-09-01,33.000000,33.700001,32.910000,33.509998,30.013424,10020900
+2010-09-02,33.619999,33.980000,33.529999,33.910000,30.371687,7034600
+2010-09-03,34.020000,34.700001,34.020000,34.669998,31.052383,8473800
+2010-09-07,34.500000,34.630001,33.660000,33.840000,30.308990,10733100
+2010-09-08,33.880001,34.099998,33.750000,33.840000,30.308990,8370900
+2010-09-09,34.340000,34.490002,34.000000,34.080002,30.523951,6990600
+2010-09-10,34.160000,34.279999,34.049999,34.150002,30.586645,7362900
+2010-09-13,34.419998,34.669998,33.919998,34.270000,30.694126,8826700
+2010-09-14,34.230000,34.230000,33.720001,34.049999,30.497082,7761800
+2010-09-15,34.009998,34.419998,33.880001,34.209999,30.640383,7262300
+2010-09-16,34.090000,34.189999,33.709999,34.169998,30.604551,7297600
+2010-09-17,34.380001,34.730000,34.360001,34.560001,30.953863,19012800
+2010-09-20,34.560001,34.990002,34.529999,34.900002,31.258387,10219200
+2010-09-21,34.910000,34.980000,34.270000,34.500000,30.900126,11630600
+2010-09-22,34.570000,34.810001,33.900002,33.990002,30.443342,9897700
+2010-09-23,33.669998,33.730000,33.000000,33.119999,29.664114,18090900
+2010-09-24,33.380001,33.820000,33.310001,33.580002,30.076124,10657100
+2010-09-27,33.599998,33.660000,33.279999,33.290001,29.816383,7884000
+2010-09-28,33.369999,33.470001,32.939999,33.220001,29.753685,12863900
+2010-09-29,33.029999,33.180000,32.680000,33.009998,29.565594,9231200
+2010-09-30,33.189999,33.490002,32.830002,33.099998,29.646204,14459500
+2010-10-01,33.299999,33.580002,33.119999,33.340000,29.861162,12916300
+2010-10-04,33.230000,33.599998,33.080002,33.139999,29.682034,10708300
+2010-10-05,33.389999,33.990002,33.360001,33.830002,30.300037,14555400
+2010-10-06,33.840000,33.980000,33.439999,33.720001,30.201515,11339900
+2010-10-07,33.779999,33.980000,33.680000,33.910000,30.371687,9029800
+2010-10-08,33.970001,34.700001,33.799999,34.509998,30.909077,12971700
+2010-10-11,34.520000,34.740002,34.430000,34.570000,30.962816,5347500
+2010-10-12,34.360001,34.610001,33.959999,34.480000,30.882210,9077900
+2010-10-13,34.660000,35.200001,34.590000,34.910000,31.267342,14425700
+2010-10-14,34.790001,34.950001,34.580002,34.849998,31.213604,7358700
+2010-10-15,35.040001,35.150002,34.560001,34.880001,31.240475,9641200
+2010-10-18,34.830002,35.000000,34.500000,34.750000,31.124035,6926200
+2010-10-19,34.500000,34.599998,34.000000,34.240002,30.667252,9554500
+2010-10-20,34.270000,34.750000,34.270000,34.619999,31.007601,10107800
+2010-10-21,34.759998,35.040001,34.459999,34.680000,31.061338,16888400
+2010-10-22,34.669998,34.990002,34.299999,34.970001,31.321081,9598800
+2010-10-25,35.160000,35.720001,35.099998,35.459999,31.759947,11230400
+2010-10-26,35.130001,35.970001,34.990002,35.959999,32.207779,14800400
+2010-10-27,35.599998,35.959999,35.470001,35.919998,32.171951,11752600
+2010-10-28,35.950001,36.520000,35.939999,36.509998,32.700390,14226000
+2010-10-29,36.509998,36.509998,35.830002,36.130001,32.360043,12926000
+2010-11-01,36.209999,36.490002,35.759998,36.029999,32.270470,8454000
+2010-11-02,36.369999,36.450001,36.099998,36.110001,32.342129,16355300
+2010-11-03,36.119999,36.430000,35.950001,36.290001,32.503349,12050100
+2010-11-04,36.500000,37.160000,36.459999,37.029999,33.166134,16674900
+2010-11-05,36.779999,37.270000,36.779999,37.200001,33.318390,9712900
+2010-11-08,37.049999,37.139999,36.889999,37.060001,33.193001,6802600
+2010-11-09,37.189999,37.230000,36.680000,36.860001,33.013874,7600000
+2010-11-10,36.910000,37.000000,36.509998,36.990002,33.130310,7857200
+2010-11-11,36.880001,37.389999,35.150002,35.930000,32.180912,37821800
+2010-11-12,36.759998,38.000000,36.599998,37.750000,33.811005,39192700
+2010-11-15,37.419998,37.660000,37.139999,37.250000,33.363178,11658400
+2010-11-16,36.869999,37.290001,36.709999,36.919998,33.067608,12409800
+2010-11-17,36.799999,37.320000,36.700001,37.220001,33.336308,10600500
+2010-11-18,37.200001,37.779999,37.200001,37.580002,33.658749,9520400
+2010-11-19,37.099998,37.330002,36.750000,37.009998,33.148216,13691800
+2010-11-22,36.880001,37.049999,36.520000,36.950001,33.094482,6558800
+2010-11-23,36.529999,36.619999,36.000000,36.119999,32.351086,11925900
+2010-11-24,36.360001,36.980000,36.299999,36.869999,33.022827,9098200
+2010-11-26,36.509998,36.930000,36.430000,36.700001,32.870567,3490900
+2010-11-29,36.450001,36.709999,36.099998,36.419998,32.619781,8495200
+2010-11-30,36.040001,36.830002,36.020000,36.509998,32.700390,17578900
+2010-12-01,37.150002,37.410000,37.040001,37.119999,33.246738,15010900
+2010-12-02,37.060001,37.590000,37.049999,37.340000,33.443783,13967700
+2010-12-03,37.049999,37.669998,37.049999,37.590000,33.667702,6901500
+2010-12-06,37.430000,37.730000,37.279999,37.560001,33.640827,5086200
+2010-12-07,37.840000,37.980000,37.310001,37.330002,33.434826,8123100
+2010-12-08,37.470001,37.490002,36.939999,36.970001,33.112396,8561900
+2010-12-09,36.869999,37.000000,36.509998,36.740002,33.266323,6547200
+2010-12-10,36.860001,36.950001,36.599998,36.660000,33.193886,6748900
+2010-12-13,36.950001,37.480000,36.570000,37.130001,33.619446,12361700
+2010-12-14,37.189999,37.500000,37.139999,37.240002,33.719040,6954600
+2010-12-15,37.160000,37.369999,36.900002,36.950001,33.456467,7258400
+2010-12-16,37.020000,37.180000,36.900002,37.009998,33.510796,7649800
+2010-12-17,36.970001,37.169998,36.770000,37.049999,33.547012,11285400
+2010-12-20,37.029999,37.340000,36.840000,37.060001,33.556065,5114100
+2010-12-21,37.169998,37.419998,37.099998,37.330002,33.800541,4783900
+2010-12-22,37.430000,37.990002,37.340000,37.950001,34.361919,7229000
+2010-12-23,37.849998,37.939999,37.520000,37.700001,34.135555,4506000
+2010-12-27,37.549999,37.750000,37.299999,37.480000,33.936348,3518900
+2010-12-28,37.459999,37.520000,37.070000,37.360001,33.827705,3464300
+2010-12-29,37.450001,37.840000,37.310001,37.599998,34.045010,5097000
+2010-12-30,37.580002,37.650002,37.389999,37.480000,33.936348,3411800
+2010-12-31,37.369999,37.599998,37.209999,37.509998,33.963512,3650800
+2011-01-03,37.740002,38.000000,37.619999,37.820000,34.244205,7591000
+2011-01-04,37.930000,39.000000,37.849998,38.990002,35.303585,21974200
+2011-01-05,39.230000,40.000000,39.169998,39.959999,36.181873,22003400
+2011-01-06,39.689999,39.889999,39.439999,39.650002,35.901188,14738300
+2011-01-07,39.549999,39.680000,39.160000,39.450001,35.720104,6598700
+2011-01-10,39.009998,39.549999,38.930000,39.500000,35.765369,8772600
+2011-01-11,39.599998,39.810001,39.240002,39.400002,35.674828,6494800
+2011-01-12,39.480000,39.650002,39.040001,39.169998,35.466568,9586900
+2011-01-13,39.180000,39.360001,39.009998,39.259998,35.548061,7084100
+2011-01-14,39.099998,39.340000,39.000000,39.290001,35.575222,5214000
+2011-01-18,39.070000,39.509998,39.000000,39.389999,35.665768,7892000
+2011-01-19,39.090000,39.490002,38.959999,39.090000,35.394131,8371900
+2011-01-20,39.009998,39.389999,38.509998,39.169998,35.466568,8977900
+2011-01-21,39.290001,39.939999,39.279999,39.740002,35.982677,10931800
+2011-01-24,39.639999,39.950001,39.540001,39.939999,36.163761,7794300
+2011-01-25,39.790001,39.919998,39.470001,39.860001,36.091331,7920300
+2011-01-26,39.840000,39.930000,39.369999,39.439999,35.711037,9777100
+2011-01-27,39.540001,39.689999,39.250000,39.459999,35.729145,8231000
+2011-01-28,39.360001,39.790001,38.650002,38.849998,35.176823,8399900
+2011-01-31,39.040001,39.250000,38.650002,38.869999,35.194923,8206400
+2011-02-01,39.040001,39.950001,39.040001,39.880001,36.109444,9283000
+2011-02-02,39.799999,40.709999,39.799999,40.490002,36.661774,15357000
+2011-02-03,40.400002,40.750000,40.279999,40.500000,36.670822,9513800
+2011-02-04,40.470001,40.770000,40.410000,40.709999,36.860966,11142200
+2011-02-07,40.799999,41.200001,40.709999,40.939999,37.069221,7283900
+2011-02-08,40.900002,41.240002,40.790001,41.180000,37.286522,11850200
+2011-02-09,42.759998,44.049999,42.720001,43.360001,39.260414,35806100
+2011-02-10,42.779999,43.660000,42.730000,43.310001,39.215145,14829000
+2011-02-11,43.040001,43.509998,43.000000,43.410000,39.305683,14199200
+2011-02-14,43.189999,43.410000,42.980000,43.240002,39.151764,7500600
+2011-02-15,43.000000,43.570000,42.980000,43.090000,39.015945,12069100
+2011-02-16,43.209999,43.709999,43.049999,43.700001,39.568272,7902800
+2011-02-17,43.500000,43.900002,43.470001,43.700001,39.568272,6396000
+2011-02-18,43.650002,43.650002,43.259998,43.560001,39.441509,15918200
+2011-02-22,42.830002,43.150002,42.400002,42.650002,38.617538,14162700
+2011-02-23,42.910000,43.290001,41.970001,42.130001,38.146702,18834700
+2011-02-24,42.000000,42.470001,41.599998,42.419998,38.409283,14504800
+2011-02-25,42.500000,43.130001,42.400002,42.950001,38.889179,9457300
+2011-02-28,43.020000,43.869999,43.000000,43.740002,39.604485,12528200
+2011-03-01,43.630001,43.790001,42.970001,42.990002,38.925400,12093400
+2011-03-02,43.139999,43.619999,42.980000,43.290001,39.197029,7624800
+2011-03-03,43.610001,44.240002,43.610001,44.070000,39.903282,9641500
+2011-03-04,44.029999,44.340000,43.099998,43.549999,39.432446,11127700
+2011-03-07,43.529999,43.610001,42.380001,43.020000,38.952564,11765100
+2011-03-08,43.060001,43.580002,42.730000,43.200001,39.115540,6864300
+2011-03-09,43.240002,43.380001,42.810001,43.119999,39.043106,5386700
+2011-03-10,42.650002,42.830002,42.160000,42.470001,38.454563,10210600
+2011-03-11,42.430000,43.119999,42.259998,42.930000,38.871071,6954800
+2011-03-14,42.639999,42.680000,41.580002,42.240002,38.246311,11005600
+2011-03-15,41.130001,41.860001,40.630001,41.619999,37.684929,14572000
+2011-03-16,41.220001,41.580002,40.419998,40.599998,36.761364,16754900
+2011-03-17,41.189999,41.200001,40.480000,40.759998,36.906239,9378400
+2011-03-18,41.250000,41.669998,41.169998,41.230000,37.331799,13291500
+2011-03-21,41.459999,41.990002,40.990002,41.820000,37.866020,7130300
+2011-03-22,41.849998,41.950001,41.380001,41.439999,37.521946,6881000
+2011-03-23,41.400002,42.349998,40.869999,42.240002,38.246311,11705500
+2011-03-24,42.470001,42.990002,42.400002,42.860001,38.807690,7011100
+2011-03-25,42.939999,43.240002,42.779999,42.970001,38.907291,7935500
+2011-03-28,43.189999,43.299999,42.520000,42.520000,38.499836,6022900
+2011-03-29,42.580002,43.029999,42.360001,42.939999,38.880127,5452700
+2011-03-30,43.150002,43.480000,43.049999,43.349998,39.251362,7004800
+2011-03-31,43.060001,43.209999,42.700001,43.090000,39.015945,7206400
+2011-04-01,43.230000,43.380001,42.639999,42.849998,38.798637,8404800
+2011-04-04,42.869999,43.049999,42.509998,42.630001,38.599434,6841000
+2011-04-05,42.660000,42.830002,42.200001,42.430000,38.418343,7635700
+2011-04-06,42.720001,42.730000,42.060001,42.270000,38.273468,7826300
+2011-04-07,42.180000,42.270000,41.500000,42.040001,38.065216,9439900
+2011-04-08,42.220001,42.240002,41.400002,41.759998,37.811687,7271800
+2011-04-11,41.759998,41.939999,41.549999,41.889999,37.929401,6882700
+2011-04-12,41.590000,41.750000,41.250000,41.630001,37.693985,8387000
+2011-04-13,41.830002,42.020000,41.490002,41.700001,37.757366,5964800
+2011-04-14,41.500000,41.560001,40.840000,41.020000,37.141651,11204000
+2011-04-15,41.230000,41.709999,40.959999,41.520000,37.594372,8376900
+2011-04-18,40.970001,41.279999,40.459999,41.200001,37.304638,9975800
+2011-04-19,41.080002,41.459999,40.880001,41.349998,37.440456,6903700
+2011-04-20,41.880001,42.459999,41.759998,42.040001,38.065216,7400000
+2011-04-21,42.320000,42.400002,42.099998,42.270000,38.273468,4860800
+2011-04-25,42.340000,42.349998,41.869999,41.930000,37.965618,5157000
+2011-04-26,41.980000,42.439999,41.720001,42.330002,38.327801,7290800
+2011-04-27,42.259998,42.680000,42.180000,42.580002,38.554161,5396900
+2011-04-28,42.380001,43.150002,42.380001,43.020000,38.952564,6529800
+2011-04-29,43.040001,43.349998,42.810001,43.099998,39.024994,5740200
+2011-05-02,43.470001,43.790001,43.209999,43.270000,39.178921,5601100
+2011-05-03,43.250000,43.570000,42.889999,43.070000,38.997829,6206900
+2011-05-04,42.939999,43.240002,42.299999,42.619999,38.590378,7481400
+2011-05-05,42.509998,43.250000,42.439999,42.689999,38.653759,8242300
+2011-05-06,43.299999,43.500000,42.849998,43.060001,38.988785,7791900
+2011-05-09,43.320000,43.349998,42.930000,43.099998,39.024994,6308300
+2011-05-10,43.240002,44.130001,43.099998,43.910000,39.758411,12541000
+2011-05-11,42.110001,42.580002,41.110001,41.520000,37.594372,38451900
+2011-05-12,41.509998,41.759998,40.939999,41.580002,37.648705,13080100
+2011-05-13,41.610001,41.869999,41.250000,41.520000,37.594372,8420500
+2011-05-16,41.259998,41.299999,40.889999,40.930000,37.060165,11697800
+2011-05-17,40.860001,41.139999,40.680000,41.009998,37.132599,10024400
+2011-05-18,40.939999,41.450001,40.900002,41.419998,37.503830,9948300
+2011-05-19,41.549999,41.740002,41.139999,41.380001,37.467613,10055800
+2011-05-20,41.340000,41.840000,41.119999,41.500000,37.576271,9213000
+2011-05-23,41.150002,41.330002,40.950001,41.160000,37.268421,7955900
+2011-05-24,41.270000,41.340000,40.869999,41.090000,37.205040,8459300
+2011-05-25,40.919998,41.340000,40.849998,41.130001,37.241257,7030500
+2011-05-26,41.020000,41.220001,40.549999,40.990002,37.114494,11189400
+2011-05-27,41.189999,41.669998,41.189999,41.520000,37.594372,8529400
+2011-05-31,41.900002,41.990002,41.480000,41.630001,37.693985,10977200
+2011-06-01,41.529999,41.590000,40.320000,40.400002,36.580280,12632100
+2011-06-02,40.349998,40.689999,39.990002,40.060001,36.272423,13421400
+2011-06-03,39.660000,39.660000,39.169998,39.380001,35.656715,15135500
+2011-06-06,39.180000,39.700001,39.139999,39.400002,35.674828,9193400
+2011-06-07,39.610001,39.880001,39.330002,39.349998,35.629551,8666700
+2011-06-08,39.459999,39.650002,39.139999,39.230000,35.520897,8483000
+2011-06-09,38.950001,39.529999,38.849998,39.369999,35.647655,9642200
+2011-06-10,39.150002,39.330002,38.419998,38.500000,34.859917,13096800
+2011-06-13,38.630001,38.860001,38.349998,38.369999,34.742207,9253400
+2011-06-14,38.680000,38.860001,38.509998,38.590000,34.941402,10074600
+2011-06-15,38.380001,38.820000,38.240002,38.389999,34.760311,10716200
+2011-06-16,38.369999,38.470001,37.770000,38.009998,34.416241,11806900
+2011-06-17,38.310001,38.369999,37.980000,38.040001,34.443409,12658300
+2011-06-20,37.849998,38.270000,37.650002,38.230000,34.615444,8746200
+2011-06-21,38.450001,38.889999,38.250000,38.779999,35.113441,7815000
+2011-06-22,38.560001,38.770000,38.279999,38.310001,34.687881,8543700
+2011-06-23,37.880001,37.930000,37.189999,37.820000,34.244205,11874100
+2011-06-24,37.910000,38.020000,37.389999,37.580002,34.026901,13425400
+2011-06-27,37.619999,38.419998,37.470001,38.070000,34.470573,8258400
+2011-06-28,38.169998,38.450001,37.790001,37.919998,34.334755,11912700
+2011-06-29,38.110001,38.430000,37.730000,38.349998,34.724094,9614100
+2011-06-30,38.500000,39.119999,38.349998,39.040001,35.348854,10049800
+2011-07-01,39.150002,39.860001,39.029999,39.720001,35.964565,9245300
+2011-07-05,39.770000,40.180000,39.599998,39.970001,36.190933,9446200
+2011-07-06,39.950001,39.950001,39.270000,39.570000,35.828751,11435800
+2011-07-07,39.849998,40.060001,39.730000,39.740002,35.982677,10227600
+2011-07-08,39.980000,39.980000,39.200001,39.910000,36.136600,11795100
+2011-07-11,39.490002,39.540001,39.080002,39.220001,35.511845,9785500
+2011-07-12,39.340000,39.520000,39.029999,39.150002,35.448460,11311000
+2011-07-13,39.250000,39.830002,39.090000,39.560001,35.819695,12361400
+2011-07-14,39.529999,39.840000,39.389999,39.580002,35.837799,13294000
+2011-07-15,39.720001,39.820000,38.980000,39.270000,35.557106,18304900
+2011-07-18,39.119999,39.119999,38.459999,38.750000,35.086281,11309600
+2011-07-19,39.040001,39.730000,38.990002,39.540001,35.801590,7987500
+2011-07-20,39.580002,39.849998,39.279999,39.360001,35.638611,6126400
+2011-07-21,39.689999,40.869999,39.669998,40.740002,36.888126,13473000
+2011-07-22,40.779999,40.970001,40.540001,40.650002,36.806641,7723300
+2011-07-25,40.250000,40.889999,40.160000,40.560001,36.725151,6840900
+2011-07-26,40.380001,40.810001,40.369999,40.509998,36.679878,9292900
+2011-07-27,40.349998,40.369999,39.419998,39.520000,35.783474,10736600
+2011-07-28,39.459999,39.830002,39.189999,39.400002,35.674828,11028100
+2011-07-29,39.049999,39.310001,38.500000,38.619999,34.968567,16049700
+2011-08-01,38.730000,38.750000,37.820000,38.439999,34.805588,11497500
+2011-08-02,38.060001,38.430000,36.930000,36.970001,33.474571,13652300
+2011-08-03,36.939999,37.500000,36.470001,37.430000,33.891079,11804500
+2011-08-04,36.849998,36.889999,35.259998,35.349998,32.007732,19369200
+2011-08-05,35.790001,35.950001,34.400002,35.180000,31.853813,22781900
+2011-08-08,34.060001,34.570000,33.029999,33.029999,29.907089,25923500
+2011-08-09,33.470001,34.750000,32.310001,34.700001,31.419201,31836700
+2011-08-10,31.840000,32.270000,29.600000,31.540001,28.557968,87048500
+2011-08-11,31.830000,33.080002,31.809999,32.119999,29.083126,36389500
+2011-08-12,32.759998,33.560001,32.439999,33.090000,29.961416,24426900
+2011-08-15,33.430000,33.799999,33.090000,33.650002,30.468475,17839100
+2011-08-16,33.119999,33.669998,32.750000,33.419998,30.260216,18558300
+2011-08-17,33.610001,33.750000,32.840000,33.369999,30.214945,12741000
+2011-08-18,32.470001,32.689999,32.169998,32.549999,29.472473,23724700
+2011-08-19,31.950001,32.980000,31.820000,31.850000,28.838657,19006700
+2011-08-22,32.419998,32.650002,31.940001,32.020000,28.992584,15000100
+2011-08-23,32.070000,32.110001,31.330000,32.110001,29.074078,30417400
+2011-08-24,32.009998,32.580002,31.730000,32.509998,29.436253,12595700
+2011-08-25,32.529999,32.830002,31.879999,32.009998,28.983526,10799000
+2011-08-26,32.119999,32.639999,31.360001,32.400002,29.336655,11247100
+2011-08-29,32.720001,33.169998,32.610001,33.160000,30.024801,8397500
+2011-08-30,33.119999,33.869999,32.820000,33.580002,30.405094,14060100
+2011-08-31,33.849998,34.320000,33.740002,34.060001,30.839706,12282000
+2011-09-01,34.090000,34.330002,33.340000,33.380001,30.223997,10224700
+2011-09-02,32.840000,33.040001,32.410000,32.459999,29.390982,9144700
+2011-09-06,31.459999,31.940001,31.330000,31.760000,28.757166,12844800
+2011-09-07,32.150002,32.680000,31.990000,32.630001,29.544912,11471000
+2011-09-08,32.419998,32.660000,31.910000,31.950001,28.929199,10673200
+2011-09-09,31.639999,31.719999,30.730000,31.040001,28.105242,17509200
+2011-09-12,30.690001,31.309999,30.520000,31.290001,28.331606,10440000
+2011-09-13,31.360001,31.610001,31.020000,31.450001,28.476477,9792200
+2011-09-14,31.670000,32.730000,31.230000,32.290001,29.237057,12593100
+2011-09-15,32.580002,32.990002,32.430000,32.939999,29.825600,8466900
+2011-09-16,33.139999,33.290001,32.630001,32.910000,29.798435,16655300
+2011-09-19,32.380001,32.639999,32.060001,32.509998,29.436253,9979700
+2011-09-20,32.660000,32.880001,32.230000,32.310001,29.255165,10117700
+2011-09-21,32.330002,32.520000,31.240000,31.280001,28.322548,12618500
+2011-09-22,30.540001,30.900000,29.180000,29.549999,26.756115,26238400
+2011-09-23,29.459999,29.900000,29.049999,29.830000,27.009642,20944100
+2011-09-26,30.059999,30.340000,29.680000,30.299999,27.435204,14200600
+2011-09-27,30.990000,31.709999,30.590000,31.160000,28.213892,15723900
+2011-09-28,31.270000,31.420000,30.379999,30.480000,27.598188,11415000
+2011-09-29,31.150000,31.520000,30.230000,30.650000,27.752113,12114800
+2011-09-30,30.160000,30.709999,29.990000,30.160000,27.308443,15135000
+2011-10-03,30.030001,30.330000,28.990000,29.000000,26.258116,13899200
+2011-10-04,28.730000,29.969999,28.190001,29.860001,27.036808,19000000
+2011-10-05,30.350000,31.549999,30.309999,31.510000,28.530807,17823700
+2011-10-06,31.230000,32.049999,31.020000,32.029999,29.001638,12659400
+2011-10-07,32.200001,32.369999,31.610001,31.700001,28.702841,12070100
+2011-10-10,32.360001,33.099998,32.299999,33.000000,29.879930,7887400
+2011-10-11,33.110001,33.279999,32.240002,32.610001,29.526802,12891200
+2011-10-12,32.959999,34.430000,32.750000,33.759998,30.568066,17363400
+2011-10-13,33.389999,33.730000,33.270000,33.549999,30.377924,6837900
+2011-10-14,34.020000,34.529999,33.910000,34.470001,31.210943,10694700
+2011-10-17,33.830002,34.220001,33.180000,33.389999,30.233053,9987700
+2011-10-18,33.459999,34.200001,33.130001,33.939999,30.731047,10980600
+2011-10-19,33.880001,34.349998,33.470001,33.610001,30.432253,7479000
+2011-10-20,33.849998,34.119999,33.250000,33.840000,30.640509,7319800
+2011-10-21,34.180000,35.160000,34.090000,35.160000,31.835705,13707600
+2011-10-24,35.240002,35.709999,35.020000,35.369999,32.025852,8837900
+2011-10-25,35.200001,35.349998,34.410000,34.509998,31.247158,7812300
+2011-10-26,34.880001,35.189999,34.090000,35.049999,31.736105,10407200
+2011-10-27,35.790001,36.599998,35.630001,36.279999,32.849808,11316800
+2011-10-28,36.230000,36.340000,35.720001,36.209999,32.786430,8477600
+2011-10-31,35.720001,35.790001,34.860001,34.880001,31.582174,10152100
+2011-11-01,34.430000,34.470001,33.310001,33.599998,30.423199,13361700
+2011-11-02,34.130001,34.380001,33.799999,34.070000,30.848757,8538100
+2011-11-03,34.570000,35.130001,33.970001,35.029999,31.717995,9276100
+2011-11-04,34.810001,35.009998,34.330002,34.759998,31.473520,6430900
+2011-11-07,34.860001,35.169998,34.610001,35.150002,31.826645,6942700
+2011-11-08,35.439999,35.459999,34.340000,35.290001,31.953409,10710900
+2011-11-09,34.590000,34.590000,33.660000,33.790001,30.595232,11913800
+2011-11-10,34.160000,34.759998,33.650002,34.639999,31.364866,12170600
+2011-11-11,36.630001,37.419998,36.369999,36.700001,33.230099,22388500
+2011-11-14,36.330002,36.419998,35.880001,36.119999,32.704941,9631900
+2011-11-15,35.959999,37.000000,35.959999,36.450001,33.003738,14226700
+2011-11-16,35.959999,36.320000,35.470001,35.529999,32.170719,10809700
+2011-11-17,35.380001,35.860001,34.799999,35.150002,31.826645,32562000
+2011-11-18,35.389999,35.779999,35.240002,35.630001,32.261265,32200300
+2011-11-21,35.480000,35.630001,34.220001,34.330002,31.084183,39965700
+2011-11-22,34.209999,34.470001,33.410000,34.020000,30.803488,12500700
+2011-11-23,33.580002,33.779999,33.400002,33.400002,30.242111,10764000
+2011-11-25,33.500000,34.220001,33.279999,33.509998,30.341705,4991900
+2011-11-28,34.270000,34.529999,33.759998,34.070000,30.848757,8656400
+2011-11-29,34.340000,34.500000,33.970001,34.000000,30.785379,7655200
+2011-11-30,34.680000,35.889999,34.660000,35.849998,32.460461,16536000
+2011-12-01,36.029999,36.330002,35.720001,35.990002,32.587227,10350800
+2011-12-02,36.330002,36.830002,36.299999,36.610001,33.148613,10202500
+2011-12-05,37.040001,37.369999,36.730000,36.939999,33.447407,9973900
+2011-12-06,36.869999,37.200001,36.750000,36.830002,33.347813,6573100
+2011-12-07,36.709999,37.330002,36.459999,37.099998,33.592278,9543600
+2011-12-08,36.860001,36.959999,35.820000,35.919998,32.523849,9881700
+2011-12-09,36.169998,36.770000,36.169998,36.560001,33.103340,7323400
+2011-12-12,36.200001,36.770000,35.919998,36.650002,33.184830,10050800
+2011-12-13,36.689999,37.119999,36.180000,36.330002,32.895084,12708800
+2011-12-14,35.700001,36.000000,35.119999,35.160000,32.370308,10664000
+2011-12-15,35.520000,35.720001,34.990002,35.189999,32.397919,9876100
+2011-12-16,35.450001,35.740002,35.150002,35.320000,32.517612,20350200
+2011-12-19,35.389999,35.549999,34.509998,34.750000,31.992842,11436800
+2011-12-20,35.169998,36.380001,35.150002,36.169998,33.300175,10126200
+2011-12-21,36.230000,36.700001,35.750000,36.270000,33.392242,8226200
+2011-12-22,36.380001,37.000000,36.259998,36.950001,34.018280,8067600
+2011-12-23,37.130001,37.720001,37.110001,37.700001,34.708782,6788700
+2011-12-27,37.610001,37.799999,37.400002,37.610001,34.625923,5189500
+2011-12-28,37.650002,37.709999,37.169998,37.240002,34.285278,4634300
+2011-12-29,37.450001,37.799999,37.250000,37.709999,34.717987,5022500
+2011-12-30,37.730000,37.799999,37.360001,37.500000,34.524647,4776100
+2012-01-03,37.970001,38.459999,37.939999,38.310001,35.270378,9704200
+2012-01-04,38.189999,38.990002,38.119999,38.849998,35.767536,9889900
+2012-01-05,38.830002,39.580002,38.700001,39.500000,36.365959,14327100
+2012-01-06,39.549999,40.150002,39.450001,39.910000,36.743431,14401600
+2012-01-09,39.740002,40.250000,39.590000,39.750000,36.596123,12145200
+2012-01-10,39.480000,39.910000,39.160000,39.630001,36.485649,14070900
+2012-01-11,39.389999,39.529999,38.450001,38.700001,35.629440,16257600
+2012-01-12,38.619999,38.860001,38.410000,38.730000,35.657055,10073400
+2012-01-13,38.470001,38.590000,38.020000,38.400002,35.353241,9877700
+2012-01-17,38.689999,38.980000,38.410000,38.480000,35.426888,9142800
+2012-01-18,38.509998,39.099998,38.400002,39.020000,35.924042,9594500
+2012-01-19,39.009998,39.470001,39.000000,39.439999,36.310722,7731000
+2012-01-20,39.419998,39.439999,39.099998,39.310001,36.191036,7879900
+2012-01-23,39.380001,39.500000,38.959999,39.250000,36.135796,7124600
+2012-01-24,39.029999,39.270000,38.570000,39.250000,36.135796,9407000
+2012-01-25,39.119999,39.680000,38.959999,39.560001,36.421200,10054000
+2012-01-26,39.590000,39.750000,39.060001,39.349998,36.227859,7162600
+2012-01-27,39.270000,39.500000,38.980000,39.250000,36.135796,9588400
+2012-01-30,38.740002,39.110001,38.380001,38.990002,35.896423,8683200
+2012-01-31,39.080002,39.310001,38.660000,38.900002,35.813568,14020100
+2012-02-01,39.250000,39.669998,39.160000,39.330002,36.209450,8901000
+2012-02-02,39.220001,39.250000,38.560001,38.910000,35.822777,9076800
+2012-02-03,39.430000,40.000000,39.360001,40.000000,36.826290,11605300
+2012-02-06,40.000000,40.590000,39.959999,40.459999,37.249790,11536500
+2012-02-07,40.730000,41.150002,40.200001,40.980000,37.728531,20107300
+2012-02-08,41.369999,41.910000,40.990002,41.270000,37.995525,21690200
+2012-02-09,40.750000,41.639999,40.549999,41.529999,38.234894,13581000
+2012-02-10,40.939999,41.450001,40.830002,41.450001,38.161247,10116300
+2012-02-13,41.619999,41.919998,41.590000,41.790001,38.474266,9065800
+2012-02-14,41.500000,41.619999,41.259998,41.599998,38.299339,8210300
+2012-02-15,41.480000,41.750000,41.130001,41.250000,37.977108,7891200
+2012-02-16,41.230000,41.730000,41.080002,41.540001,38.244106,7464500
+2012-02-17,41.709999,41.860001,41.549999,41.750000,38.437439,7758700
+2012-02-21,41.799999,41.900002,41.490002,41.570000,38.271729,5675100
+2012-02-22,41.549999,41.709999,41.150002,41.270000,37.995525,6176200
+2012-02-23,41.279999,41.669998,41.090000,41.480000,38.188858,6271000
+2012-02-24,41.439999,41.580002,41.230000,41.310001,38.032349,6150600
+2012-02-27,41.700001,41.950001,41.299999,41.639999,38.336163,9132300
+2012-02-28,41.750000,41.970001,41.549999,41.930000,38.603157,6895200
+2012-02-29,41.869999,42.369999,41.849998,41.990002,38.658390,14858700
+2012-03-01,42.130001,42.540001,42.130001,42.389999,39.026665,7995900
+2012-03-02,42.259998,42.529999,42.040001,42.360001,38.999035,5070500
+2012-03-05,42.349998,42.770000,42.110001,42.700001,39.312065,9288800
+2012-03-06,42.240002,42.320000,41.730000,42.000000,38.667603,10434500
+2012-03-07,42.000000,42.070000,41.700001,41.750000,38.437439,7793500
+2012-03-08,41.990002,42.320000,41.950001,42.020000,38.686016,6163800
+2012-03-09,42.119999,42.570000,42.029999,42.240002,38.888557,6896400
+2012-03-12,42.529999,42.849998,42.150002,42.340000,38.980629,6834500
+2012-03-13,42.549999,44.080002,42.430000,44.009998,40.518127,15051900
+2012-03-14,43.590000,43.700001,42.900002,43.480000,40.030178,10991300
+2012-03-15,43.500000,43.500000,43.169998,43.470001,40.020973,6123600
+2012-03-16,43.419998,43.520000,43.180000,43.189999,39.763184,8952100
+2012-03-19,43.070000,43.580002,42.950001,43.439999,39.993347,6382500
+2012-03-20,43.000000,43.310001,42.930000,43.240002,39.809219,6853900
+2012-03-21,43.419998,43.869999,43.270000,43.270000,39.836838,9582000
+2012-03-22,43.080002,43.400002,43.009998,43.290001,39.855247,8560200
+2012-03-23,43.330002,43.709999,42.860001,43.650002,40.186691,8519000
+2012-03-26,43.939999,44.410000,43.919998,44.380001,40.858768,10850200
+2012-03-27,44.389999,44.500000,44.090000,44.150002,40.647018,9343100
+2012-03-28,44.040001,44.150002,43.259998,43.509998,40.057793,9444300
+2012-03-29,43.200001,43.270000,42.299999,43.009998,39.597462,10402500
+2012-03-30,43.520000,43.849998,43.099998,43.779999,40.306377,9096000
+2012-04-02,43.590000,44.000000,43.320000,43.840000,40.361618,7653800
+2012-04-03,43.669998,43.750000,42.910000,43.290001,39.855247,7996400
+2012-04-04,42.900002,42.990002,42.480000,42.930000,39.523811,8560700
+2012-04-05,42.840000,43.180000,42.680000,43.080002,39.661915,6340800
+2012-04-09,42.130001,42.270000,41.959999,42.110001,38.768879,6813500
+2012-04-10,41.869999,42.130001,40.880001,40.990002,37.737740,10682100
+2012-04-11,41.509998,41.700001,41.240002,41.369999,38.087593,6358400
+2012-04-12,41.430000,42.200001,41.250000,42.150002,38.805706,5538400
+2012-04-13,42.029999,42.220001,41.700001,41.849998,38.529499,7311700
+2012-04-16,42.060001,42.130001,41.540001,41.660000,38.354580,6109300
+2012-04-17,41.880001,42.740002,41.880001,42.680000,39.293659,6550300
+2012-04-18,42.480000,42.610001,42.320000,42.490002,39.118732,3781900
+2012-04-19,42.430000,42.509998,41.799999,42.080002,38.741261,7074200
+2012-04-20,42.240002,42.840000,42.240002,42.349998,38.989830,7773100
+2012-04-23,42.250000,42.250000,41.730000,42.009998,38.676804,6592100
+2012-04-24,42.029999,42.430000,41.970001,42.180000,38.833321,4954600
+2012-04-25,42.400002,42.759998,42.380001,42.700001,39.312065,4926300
+2012-04-26,42.740002,43.560001,42.689999,43.360001,39.919697,7713000
+2012-04-27,43.599998,43.650002,43.119999,43.349998,39.910484,6354200
+2012-04-30,43.320000,43.349998,42.840000,43.110001,39.689529,6987200
+2012-05-01,43.180000,44.090000,43.150002,43.790001,40.315582,9316200
+2012-05-02,43.570000,43.810001,43.299999,43.540001,40.085415,8038200
+2012-05-03,43.540001,44.130001,43.490002,43.810001,40.334000,9353900
+2012-05-04,43.599998,43.709999,42.840000,42.930000,39.523811,7458800
+2012-05-07,43.650002,43.950001,43.310001,43.820000,40.343201,11780500
+2012-05-08,43.779999,44.490002,43.090000,44.299999,40.785110,19680900
+2012-05-09,44.459999,45.799999,44.410000,45.020000,41.447990,21348300
+2012-05-10,45.139999,45.590000,45.049999,45.279999,41.687359,14082900
+2012-05-11,45.130001,45.799999,45.000000,45.560001,41.945145,9779300
+2012-05-14,45.169998,45.500000,44.980000,45.169998,41.586086,8059400
+2012-05-15,45.230000,45.799999,44.919998,45.009998,41.438782,10168800
+2012-05-16,45.150002,45.549999,45.060001,45.080002,41.503231,8399800
+2012-05-17,45.150002,45.230000,44.299999,44.330002,40.812729,8475100
+2012-05-18,44.459999,44.490002,43.619999,43.810001,40.334000,10598200
+2012-05-21,43.810001,44.410000,43.509998,44.389999,40.867973,8309300
+2012-05-22,44.490002,44.700001,44.130001,44.389999,40.867973,6855400
+2012-05-23,44.240002,44.459999,43.810001,44.209999,40.702263,8875700
+2012-05-24,44.410000,44.610001,44.009998,44.439999,40.914009,9813200
+2012-05-25,44.490002,44.959999,44.380001,44.500000,40.969242,8520700
+2012-05-29,44.700001,45.500000,44.599998,45.480000,41.871490,10560000
+2012-05-30,45.020000,45.480000,44.860001,45.200001,41.613712,11208700
+2012-05-31,45.130001,46.099998,45.060001,45.709999,42.083244,14646500
+2012-06-01,45.150002,45.349998,44.340000,44.400002,40.877182,14874500
+2012-06-04,44.470001,44.689999,44.139999,44.410000,40.886387,9480300
+2012-06-05,44.509998,44.919998,44.200001,44.840000,41.282265,11166600
+2012-06-06,45.020000,45.599998,45.000000,45.529999,41.917526,9494400
+2012-06-07,45.840000,46.220001,45.529999,45.630001,42.009590,10238900
+2012-06-08,45.700001,46.250000,45.590000,46.240002,42.571194,10716400
+2012-06-11,46.349998,46.630001,45.730000,45.799999,42.166100,7907400
+2012-06-12,45.750000,46.410000,45.580002,46.380001,42.700081,7742900
+2012-06-13,46.250000,46.650002,45.990002,46.230000,42.561981,7317600
+2012-06-14,46.400002,47.450001,46.209999,47.180000,43.436607,12553900
+2012-06-15,47.410000,47.430000,46.919998,47.090000,43.353752,11198200
+2012-06-18,46.959999,47.400002,46.799999,47.099998,43.362949,8821100
+2012-06-19,47.430000,48.000000,47.279999,47.509998,43.740421,8209000
+2012-06-20,47.639999,47.970001,47.430000,47.730000,43.942966,7264700
+2012-06-21,47.799999,47.889999,47.220001,47.400002,43.639153,10077700
+2012-06-22,47.389999,47.520000,46.910000,47.470001,43.703594,18881600
+2012-06-25,47.220001,47.230000,46.549999,46.700001,42.994698,6740600
+2012-06-26,46.880001,47.470001,46.820000,47.299999,43.547085,7279900
+2012-06-27,47.270000,48.080002,47.230000,47.869999,44.071861,6023500
+2012-06-28,47.669998,47.840000,46.980000,47.820000,44.025829,6674200
+2012-06-29,48.520000,48.950001,48.220001,48.500000,44.651875,10300500
+2012-07-02,48.619999,48.750000,48.240002,48.720001,44.854420,7275600
+2012-07-03,48.669998,48.750000,48.419998,48.590000,44.734737,3060800
+2012-07-05,48.279999,48.549999,47.830002,48.110001,44.292816,7770100
+2012-07-06,47.770000,48.180000,47.700001,48.040001,44.228371,5623100
+2012-07-09,48.060001,48.180000,47.759998,48.000000,44.191551,5596200
+2012-07-10,48.169998,48.380001,47.180000,47.360001,43.602329,8787500
+2012-07-11,47.450001,47.590000,46.970001,47.270000,43.519466,6501000
+2012-07-12,47.380001,47.799999,46.849998,47.410000,43.648357,8270700
+2012-07-13,47.400002,48.380001,47.380001,48.189999,44.366470,7187300
+2012-07-16,47.959999,48.119999,47.689999,47.860001,44.062660,5675200
+2012-07-17,48.439999,49.919998,48.400002,49.349998,45.434433,15909900
+2012-07-18,49.139999,49.669998,48.959999,49.349998,45.434433,7677500
+2012-07-19,49.389999,49.619999,48.730000,48.990002,45.103004,9330800
+2012-07-20,48.599998,48.950001,48.500000,48.590000,44.734737,8482300
+2012-07-23,47.770000,48.060001,47.419998,47.980000,44.173138,8333100
+2012-07-24,48.029999,48.330002,47.689999,48.160000,44.338856,8955100
+2012-07-25,48.330002,48.639999,48.009998,48.299999,44.467739,7034000
+2012-07-26,49.060001,49.869999,49.060001,49.709999,45.765869,10497100
+2012-07-27,49.889999,50.540001,49.689999,49.939999,45.977623,16174800
+2012-07-30,49.950001,50.139999,49.650002,49.799999,45.848728,8890600
+2012-07-31,49.779999,49.849998,49.090000,49.139999,45.241100,10503000
+2012-08-01,49.330002,49.529999,48.720001,48.820000,44.946487,6780200
+2012-08-02,48.599998,48.990002,48.130001,48.980000,45.093792,9559000
+2012-08-03,49.810001,49.869999,49.360001,49.770000,45.821110,8187800
+2012-08-06,49.970001,50.000000,49.650002,49.650002,45.710632,6353300
+2012-08-07,50.009998,50.310001,49.720001,49.810001,45.857937,13676500
+2012-08-08,49.360001,50.650002,49.049999,50.490002,46.483994,14724900
+2012-08-09,50.279999,50.470001,49.849998,49.959999,45.996037,10430300
+2012-08-10,49.900002,49.919998,49.320000,49.650002,45.710632,7449500
+2012-08-13,49.529999,49.990002,49.459999,49.860001,45.903973,4875000
+2012-08-14,49.930000,50.150002,49.529999,49.689999,45.747459,6365900
+2012-08-15,49.650002,49.970001,49.650002,49.889999,45.931595,4887100
+2012-08-16,49.990002,50.419998,49.799999,50.250000,46.263027,8285700
+2012-08-17,50.369999,50.549999,50.330002,50.459999,46.456360,6354700
+2012-08-20,50.320000,50.590000,50.150002,50.450001,46.447155,6937100
+2012-08-21,50.410000,50.480000,49.509998,49.639999,45.701424,7683200
+2012-08-22,49.599998,50.150002,49.500000,49.660000,45.719833,6759400
+2012-08-23,49.610001,49.660000,49.029999,49.169998,45.268715,5428300
+2012-08-24,49.080002,49.610001,49.009998,49.560001,45.627769,5793700
+2012-08-27,49.490002,49.849998,49.410000,49.630001,45.692215,6228800
+2012-08-28,49.439999,49.849998,49.290001,49.630001,45.692215,7962400
+2012-08-29,49.639999,50.169998,49.590000,50.040001,46.069687,7685100
+2012-08-30,49.720001,49.779999,49.369999,49.419998,45.498878,6697800
+2012-08-31,49.639999,49.900002,49.400002,49.470001,45.544914,6232500
+2012-09-04,49.520000,49.830002,49.230000,49.660000,45.719833,6988700
+2012-09-05,50.139999,51.240002,50.090000,50.790001,46.760178,16689300
+2012-09-06,50.700001,51.869999,50.639999,51.860001,47.745285,10124600
+2012-09-07,51.869999,52.000000,51.660000,51.740002,47.634808,8923600
+2012-09-10,51.610001,51.799999,51.480000,51.520000,47.432262,7479300
+2012-09-11,51.470001,51.669998,51.430000,51.560001,47.469090,6631700
+2012-09-12,51.599998,51.790001,51.270000,51.700001,47.597980,7100200
+2012-09-13,51.730000,52.669998,51.730000,52.599998,48.426567,11184700
+2012-09-14,52.450001,52.750000,52.169998,52.349998,48.196400,9507000
+2012-09-17,52.320000,52.430000,51.959999,52.150002,48.012276,7852900
+2012-09-18,52.029999,52.119999,51.770000,51.900002,47.782116,7100800
+2012-09-19,51.779999,52.950001,51.779999,52.700001,48.518635,7822500
+2012-09-20,52.430000,52.910000,52.430000,52.660000,48.481808,8598300
+2012-09-21,52.939999,53.389999,52.730000,52.740002,48.555466,13229300
+2012-09-24,52.669998,53.049999,52.610001,52.919998,48.721184,6148000
+2012-09-25,52.889999,53.400002,52.540001,52.540001,48.371334,7421200
+2012-09-26,52.419998,52.599998,51.770000,51.939999,47.818935,8017900
+2012-09-27,52.119999,52.779999,51.970001,52.580002,48.408161,5309400
+2012-09-28,52.410000,52.529999,52.049999,52.279999,48.131954,6695200
+2012-10-01,52.310001,52.860001,51.919998,52.070000,47.938622,6557200
+2012-10-02,52.130001,52.250000,51.470001,51.639999,47.542732,7243900
+2012-10-03,51.720001,52.689999,51.689999,52.470001,48.306889,6626600
+2012-10-04,52.720001,53.080002,52.389999,52.630001,48.454189,6507800
+2012-10-05,52.840000,53.150002,52.759998,52.970001,48.767212,5223200
+2012-10-08,52.369999,52.500000,52.110001,52.330002,48.177994,5827900
+2012-10-09,52.320000,52.330002,51.459999,51.509998,47.423054,8072100
+2012-10-10,51.389999,51.630001,51.090000,51.209999,47.146858,6407900
+2012-10-11,51.330002,51.560001,50.200001,50.340000,46.345882,10118400
+2012-10-12,50.549999,50.750000,50.419998,50.590000,46.576050,6135600
+2012-10-15,50.590000,50.810001,50.360001,50.790001,46.760178,6871200
+2012-10-16,51.169998,51.500000,51.080002,51.240002,47.174480,4997300
+2012-10-17,51.400002,52.400002,51.330002,52.290001,48.141167,7787900
+2012-10-18,52.259998,52.740002,52.119999,52.419998,48.260849,6290600
+2012-10-19,52.500000,52.590000,51.599998,51.900002,47.782116,8454800
+2012-10-22,51.810001,52.049999,51.400002,51.790001,47.680843,6131800
+2012-10-23,51.330002,51.349998,50.750000,50.759998,46.732559,8531000
+2012-10-24,50.950001,51.080002,50.560001,50.650002,46.631287,6069500
+2012-10-25,50.930000,51.029999,49.959999,50.259998,46.272232,6794100
+2012-10-26,50.360001,50.540001,49.520000,50.080002,46.106518,8359800
+2012-10-31,51.150002,51.240002,48.799999,49.119999,45.222683,20702900
+2012-11-01,49.279999,50.310001,49.279999,49.779999,45.830315,10582900
+2012-11-02,50.630001,50.990002,49.770000,49.860001,45.903973,14027500
+2012-11-05,50.810001,50.860001,49.820000,50.320000,46.327473,9575100
+2012-11-06,50.570000,50.700001,50.080002,50.470001,46.465572,7691200
+2012-11-07,50.650002,50.650002,49.590000,50.080002,46.106518,8533200
+2012-11-08,50.090000,50.840000,49.919998,50.040001,46.069687,11124200
+2012-11-09,46.990002,47.950001,46.529999,47.060001,43.326130,34216900
+2012-11-12,47.490002,47.630001,46.959999,47.450001,43.685188,12344500
+2012-11-13,47.279999,48.419998,47.279999,47.959999,44.154728,12269300
+2012-11-14,48.029999,48.040001,47.049999,47.169998,43.427391,9776400
+2012-11-15,47.240002,47.639999,46.959999,47.470001,43.703594,11416900
+2012-11-16,47.590000,47.759998,47.049999,47.419998,43.657566,9833400
+2012-11-19,47.700001,48.029999,47.450001,47.910000,44.108688,8512400
+2012-11-20,47.779999,48.500000,47.660000,48.439999,44.596634,7465100
+2012-11-21,48.500000,48.950001,48.400002,48.680000,44.817596,6763300
+2012-11-23,48.889999,49.279999,48.799999,49.259998,45.351574,3353600
+2012-11-26,48.980000,49.430000,48.910000,49.029999,45.139820,6935800
+2012-11-27,49.060001,49.200001,48.529999,48.599998,44.743938,6720400
+2012-11-28,48.470001,49.250000,47.840000,49.200001,45.296333,8148500
+2012-11-29,49.509998,49.849998,49.320000,49.720001,45.775078,7232200
+2012-11-30,49.810001,49.939999,49.250000,49.660000,45.719833,12343900
+2012-12-03,49.770000,50.000000,49.270000,49.290001,45.379196,8395400
+2012-12-04,49.250000,49.450001,48.849998,49.299999,45.388405,8400900
+2012-12-05,49.430000,49.900002,49.139999,49.590000,45.655392,9182300
+2012-12-06,48.779999,49.130001,48.560001,49.060001,45.861046,4828900
+2012-12-07,49.180000,49.380001,49.009998,49.240002,46.029316,5279200
+2012-12-10,49.209999,49.509998,49.189999,49.299999,46.085388,5264500
+2012-12-11,49.369999,49.990002,49.360001,49.480000,46.253662,9040000
+2012-12-12,49.759998,50.070000,49.419998,49.639999,46.403229,6474300
+2012-12-13,49.610001,49.689999,48.910000,49.110001,45.907784,6108000
+2012-12-14,49.009998,49.119999,48.549999,48.669998,45.496475,8086400
+2012-12-17,48.720001,49.320000,48.669998,49.279999,46.066704,10324500
+2012-12-18,49.349998,50.320000,49.150002,50.220001,46.945415,14023800
+2012-12-19,50.150002,50.509998,49.939999,49.939999,46.683666,8382100
+2012-12-20,49.830002,51.060001,49.830002,50.930000,47.609112,8410700
+2012-12-21,50.410000,50.750000,49.680000,50.000000,46.739754,15996900
+2012-12-24,49.919998,50.200001,49.840000,49.880001,46.627583,2772000
+2012-12-26,49.849998,49.980000,49.450001,49.849998,46.599537,4200200
+2012-12-27,49.799999,50.000000,48.900002,49.529999,46.300400,6948200
+2012-12-28,49.349998,49.700001,49.020000,49.150002,45.945183,6985800
+2012-12-31,48.889999,49.799999,48.799999,49.790001,46.543453,10224500
+2013-01-02,50.799999,51.250000,50.639999,51.099998,47.768032,11674700
+2013-01-03,51.049999,51.340000,50.840000,51.209999,47.870853,7566600
+2013-01-04,51.380001,52.340000,51.330002,52.189999,48.786957,8745900
+2013-01-07,51.759998,51.759998,50.660000,50.970001,47.646503,9472100
+2013-01-08,50.880001,51.180000,50.570000,50.759998,47.450199,8637600
+2013-01-09,50.930000,51.500000,50.759998,50.779999,47.468899,8389000
+2013-01-10,50.869999,51.240002,50.590000,50.790001,47.478245,8097400
+2013-01-11,50.820000,51.020000,50.450001,50.580002,47.281944,7149000
+2013-01-14,50.400002,50.830002,50.180000,50.590000,47.291279,9308800
+2013-01-15,50.470001,51.150002,50.430000,51.090000,47.758682,7299500
+2013-01-16,50.849998,51.779999,50.799999,51.529999,48.169987,7371800
+2013-01-17,51.840000,52.700001,51.840000,52.410000,48.992611,10132100
+2013-01-18,52.240002,52.340000,51.810001,52.340000,48.927174,8490900
+2013-01-22,52.389999,52.799999,52.180000,52.730000,49.291744,6635200
+2013-01-23,52.860001,54.160000,52.779999,53.950001,50.432194,13842700
+2013-01-24,54.029999,54.869999,53.919998,53.950001,50.432194,13608000
+2013-01-25,53.799999,54.560001,53.610001,54.380001,50.834160,7939400
+2013-01-28,54.279999,54.560001,54.099998,54.360001,50.815460,7133500
+2013-01-29,54.150002,54.209999,53.529999,53.990002,50.469593,7786400
+2013-01-30,53.849998,54.000000,53.549999,53.790001,50.282627,9821800
+2013-01-31,53.840000,54.250000,53.650002,53.880001,50.366760,7333900
+2013-02-01,54.180000,54.869999,54.150002,54.590000,51.030468,9552300
+2013-02-04,54.160000,54.500000,53.799999,53.900002,50.385452,7717400
+2013-02-05,54.360001,54.410000,54.000000,54.290001,50.750019,10246400
+2013-02-06,55.400002,55.500000,54.410000,54.520000,50.965027,26833600
+2013-02-07,54.070000,54.480000,53.410000,54.360001,50.815460,11723200
+2013-02-08,54.380001,54.720001,54.310001,54.660000,51.095901,9584300
+2013-02-11,54.650002,54.980000,54.590000,54.750000,51.180031,6358600
+2013-02-12,54.770000,55.049999,54.689999,54.950001,51.366993,8758100
+2013-02-13,55.080002,55.299999,54.669998,54.959999,51.376335,5701400
+2013-02-14,54.919998,55.169998,54.709999,54.880001,51.301552,8564500
+2013-02-15,55.049999,55.619999,54.889999,55.610001,51.983959,17783400
+2013-02-19,55.580002,55.950001,55.570000,55.730000,52.096130,7780500
+2013-02-20,55.619999,55.820000,54.560001,54.599998,51.039810,8460700
+2013-02-21,54.580002,54.820000,54.009998,54.169998,50.637848,6802500
+2013-02-22,54.389999,54.580002,53.930000,54.250000,50.712631,7603600
+2013-02-25,54.599998,54.790001,53.590000,53.590000,50.095665,9830200
+2013-02-26,53.810001,54.040001,53.599998,53.900002,50.385452,6238300
+2013-02-27,53.900002,54.660000,53.849998,54.480000,50.927635,7243500
+2013-02-28,54.540001,55.040001,54.389999,54.590000,51.030468,6605000
+2013-03-01,54.310001,55.369999,54.299999,55.330002,51.722214,8311500
+2013-03-04,55.160000,55.840000,55.000000,55.799999,52.161564,6357400
+2013-03-05,55.950001,56.580002,55.900002,56.480000,52.797226,7067900
+2013-03-06,56.529999,56.840000,56.250000,56.360001,52.685055,5572600
+2013-03-07,56.360001,56.470001,56.009998,56.320000,52.647659,4813700
+2013-03-08,56.560001,57.509998,56.520000,57.389999,53.647892,8157800
+2013-03-11,57.389999,57.750000,57.340000,57.660000,53.900288,7155200
+2013-03-12,57.520000,57.630001,56.700001,57.110001,53.386147,7159100
+2013-03-13,57.160000,57.439999,56.939999,57.340000,53.601154,5209800
+2013-03-14,57.349998,57.820000,57.270000,57.750000,53.984417,5908200
+2013-03-15,57.500000,57.779999,57.340000,57.580002,53.825504,13788100
+2013-03-18,57.080002,57.189999,56.750000,56.830002,53.124413,5125800
+2013-03-19,56.830002,57.049999,55.759998,56.310001,52.638313,8884200
+2013-03-20,56.590000,57.150002,56.540001,56.939999,53.227234,6018700
+2013-03-21,56.740002,56.849998,56.160000,56.310001,52.638313,5168500
+2013-03-22,56.650002,57.099998,56.520000,56.779999,53.077663,5500200
+2013-03-25,57.000000,57.110001,55.869999,56.209999,52.544834,7881500
+2013-03-26,56.410000,56.730000,56.330002,56.630001,52.937447,4675600
+2013-03-27,56.340000,56.500000,55.990002,56.470001,52.787876,6392400
+2013-03-28,56.590000,56.939999,56.500000,56.799999,53.096363,6705600
+2013-04-01,56.889999,57.139999,56.150002,56.689999,52.993530,6722400
+2013-04-02,57.040001,57.459999,56.959999,57.459999,53.713318,6575300
+2013-04-03,57.639999,57.750000,57.000000,57.250000,53.517017,8101000
+2013-04-04,57.279999,57.720001,57.180000,57.590000,53.834846,6412400
+2013-04-05,56.889999,57.730000,56.369999,57.700001,53.937675,7034700
+2013-04-08,57.830002,58.820000,57.759998,58.820000,54.984650,9796200
+2013-04-09,58.720001,59.240002,58.520000,59.139999,55.283779,8864800
+2013-04-10,59.369999,60.139999,59.340000,60.110001,56.190533,8438100
+2013-04-11,60.110001,60.669998,60.020000,60.549999,56.601841,9241800
+2013-04-12,60.290001,60.730000,60.270000,60.549999,56.601841,8533000
+2013-04-15,60.259998,60.669998,58.840000,58.880001,55.040733,12027900
+2013-04-16,59.259998,60.750000,59.080002,60.750000,56.788799,10309400
+2013-04-17,60.349998,60.689999,60.020000,60.680000,56.723373,11531200
+2013-04-18,60.639999,60.669998,59.880001,59.990002,56.078358,9203200
+2013-04-19,60.250000,61.560001,60.250000,61.560001,57.545986,9488300
+2013-04-22,61.590000,62.099998,61.150002,62.009998,57.966644,6306900
+2013-04-23,62.400002,62.619999,61.540001,62.590000,58.508831,6260800
+2013-04-24,62.709999,62.900002,61.590000,61.939999,57.901211,7335400
+2013-04-25,62.009998,62.169998,61.549999,62.000000,57.957298,6680900
+2013-04-26,62.060001,62.290001,61.549999,61.869999,57.835770,7038500
+2013-04-29,62.820000,63.250000,62.590000,63.000000,58.892094,7203900
+2013-04-30,62.930000,63.080002,62.439999,62.840000,58.742527,8528800
+2013-05-01,62.880001,63.500000,62.799999,63.209999,59.088398,7468900
+2013-05-02,63.400002,63.930000,63.110001,63.880001,59.714710,5249600
+2013-05-03,64.180000,64.849998,63.880001,64.800003,60.574722,9196000
+2013-05-06,65.089996,65.589996,64.510002,65.059998,60.817768,10239600
+2013-05-07,65.440002,66.089996,64.769997,66.070000,61.761909,14539100
+2013-05-08,66.000000,66.000000,64.559998,65.989998,61.687119,17803200
+2013-05-09,65.730003,67.110001,65.510002,66.669998,62.322781,11325000
+2013-05-10,66.690002,67.430000,66.339996,67.199997,62.818226,8654000
+2013-05-13,66.959999,67.519997,66.919998,67.320000,62.930412,5195000
+2013-05-14,67.430000,67.489998,66.980003,67.470001,63.070621,9675400
+2013-05-15,67.419998,67.790001,67.180000,67.669998,63.257591,7925900
+2013-05-16,67.620003,67.889999,66.199997,66.470001,62.135830,8473100
+2013-05-17,66.160004,66.580002,65.680000,66.580002,62.238663,9160100
+2013-05-20,66.580002,66.599998,66.050003,66.120003,61.808651,5893300
+2013-05-21,66.349998,66.400002,65.709999,65.830002,61.537567,7359100
+2013-05-22,65.760002,66.709999,65.269997,65.570000,61.294506,8142400
+2013-05-23,64.870003,65.440002,64.239998,65.230003,60.976688,7536200
+2013-05-24,64.750000,65.529999,64.389999,65.489998,61.219727,8846100
+2013-05-28,65.980003,66.760002,65.919998,66.690002,62.341484,8280000
+2013-05-29,66.370003,66.480003,65.739998,66.260002,61.939529,6954000
+2013-05-30,66.440002,67.110001,64.320000,64.650002,60.434505,13052700
+2013-05-31,64.570000,65.029999,63.070000,63.080002,58.966877,11280900
+2013-06-03,63.060001,63.820000,62.529999,63.799999,59.639923,8705600
+2013-06-04,63.930000,64.930000,63.860001,64.349998,60.154060,8147700
+2013-06-05,64.050003,64.120003,62.799999,63.119999,59.004265,8893600
+2013-06-06,63.099998,63.320000,62.040001,63.139999,59.022961,8661900
+2013-06-07,63.630001,64.870003,63.250000,64.849998,60.621456,7611300
+2013-06-10,64.910004,64.970001,63.700001,63.830002,59.667965,6729500
+2013-06-11,63.320000,64.300003,62.970001,63.799999,59.639923,6454800
+2013-06-12,64.320000,64.449997,62.869999,62.990002,58.882740,7663200
+2013-06-13,62.990002,64.400002,62.520000,64.300003,60.107330,5584900
+2013-06-14,64.010002,64.430000,63.630001,63.799999,59.639923,5311800
+2013-06-17,64.300003,65.129997,64.250000,64.489998,60.284935,6752000
+2013-06-18,64.610001,65.489998,64.559998,65.290001,61.032764,5234400
+2013-06-19,65.190002,65.550003,64.330002,64.330002,60.135368,6871400
+2013-06-20,63.220001,63.520000,61.820000,61.980000,57.938599,12260800
+2013-06-21,62.619999,63.080002,61.820000,62.730000,58.639698,11892400
+2013-06-24,62.250000,62.990002,61.820000,62.439999,58.368603,6494500
+2013-06-25,62.880001,63.099998,62.500000,62.570000,58.490128,6776700
+2013-06-26,62.950001,63.459999,62.750000,63.209999,59.088398,5720400
+2013-06-27,63.650002,63.970001,63.450001,63.720001,59.565144,4875900
+2013-06-28,63.759998,64.519997,63.150002,63.150002,59.032314,10560300
+2013-07-01,63.840000,64.919998,63.630001,63.930000,59.761452,8221600
+2013-07-02,63.830002,64.139999,63.080002,63.259998,59.135139,7690500
+2013-07-03,62.849998,63.830002,62.570000,63.610001,59.462315,3351800
+2013-07-05,63.880001,63.950001,63.110001,63.820000,59.658623,5645800
+2013-07-08,64.279999,64.790001,64.160004,64.709999,60.490589,6679700
+2013-07-09,65.320000,65.540001,64.650002,64.940002,60.705593,4556400
+2013-07-10,64.860001,65.300003,64.570000,64.910004,60.677547,5761700
+2013-07-11,65.650002,66.830002,65.650002,66.580002,62.238663,8329800
+2013-07-12,66.389999,67.000000,65.930000,66.980003,62.612583,5889900
+2013-07-15,67.019997,67.360001,65.889999,65.930000,61.631042,6458000
+2013-07-16,65.949997,66.099998,64.930000,65.029999,60.789726,7470900
+2013-07-17,65.419998,65.459999,64.760002,65.360001,61.098209,6846700
+2013-07-18,65.699997,66.370003,65.660004,65.820000,61.528210,4962800
+2013-07-19,65.889999,66.029999,64.910004,65.160004,60.911255,7220400
+2013-07-22,65.150002,65.330002,64.260002,64.400002,60.200806,5973600
+2013-07-23,64.599998,64.769997,64.330002,64.449997,60.247536,4534500
+2013-07-24,64.589996,65.120003,64.529999,64.639999,60.425156,5326800
+2013-07-25,64.320000,64.739998,64.260002,64.550003,60.341022,5407800
+2013-07-26,64.470001,65.070000,64.110001,64.980003,60.742992,4949600
+2013-07-29,64.800003,65.029999,64.470001,64.610001,60.397110,3805800
+2013-07-30,64.699997,64.870003,64.099998,64.180000,59.995148,6210500
+2013-07-31,64.430000,65.120003,64.379997,64.650002,60.434505,7155300
+2013-08-01,65.160004,65.870003,65.129997,65.360001,61.098209,6338100
+2013-08-02,65.559998,66.589996,65.330002,66.510002,62.173218,6098500
+2013-08-05,66.500000,66.540001,65.610001,66.019997,61.715176,7265800
+2013-08-06,66.089996,67.160004,65.970001,67.050003,62.678024,11546300
+2013-08-07,64.930000,66.300003,64.120003,65.910004,61.612347,16970500
+2013-08-08,66.339996,66.459999,65.199997,65.750000,61.462772,8826400
+2013-08-09,65.470001,65.750000,64.599998,64.730003,60.509281,7945500
+2013-08-12,64.260002,64.430000,63.669998,63.930000,59.761452,8004900
+2013-08-13,64.260002,64.260002,63.410000,63.980000,59.808193,6025100
+2013-08-14,63.950001,64.169998,63.470001,63.959999,59.789490,6413700
+2013-08-15,63.279999,63.320000,62.200001,62.380001,58.312519,9014100
+2013-08-16,61.970001,62.500000,61.709999,62.169998,58.116207,8238800
+2013-08-19,62.150002,62.180000,61.830002,61.830002,57.798378,5659900
+2013-08-20,61.790001,62.389999,61.790001,61.880001,57.845116,5192100
+2013-08-21,61.619999,61.880001,61.090000,61.139999,57.153374,7185400
+2013-08-22,61.320000,61.820000,61.160000,61.639999,57.620773,5302300
+2013-08-23,61.959999,61.990002,61.459999,61.730000,57.704899,5505900
+2013-08-26,61.759998,62.110001,61.330002,61.349998,57.349682,4499100
+2013-08-27,60.900002,61.349998,60.570000,60.689999,56.732716,7734500
+2013-08-28,60.439999,61.150002,60.410000,60.810001,56.844891,7161700
+2013-08-29,60.770000,61.520000,60.549999,61.080002,57.097290,5396200
+2013-08-30,61.139999,61.270000,60.549999,60.830002,56.863586,5404100
+2013-09-03,61.419998,61.990002,60.599998,60.930000,56.957066,7016400
+2013-09-04,60.919998,61.220001,60.520000,61.110001,57.125328,14014600
+2013-09-05,61.070000,61.599998,60.959999,61.299999,57.302948,6356600
+2013-09-06,61.470001,61.830002,60.630001,61.389999,57.387070,7319400
+2013-09-09,61.599998,61.840000,61.270000,61.590000,57.574028,7224500
+2013-09-10,62.090000,62.970001,61.930000,62.830002,58.733177,9919600
+2013-09-11,63.119999,63.939999,62.980000,63.939999,59.770794,9439700
+2013-09-12,63.799999,66.349998,63.410000,65.489998,61.219727,22426500
+2013-09-13,65.870003,67.029999,65.500000,66.690002,62.341484,12718700
+2013-09-16,67.320000,67.650002,66.879997,66.940002,62.575195,9716500
+2013-09-17,66.830002,67.199997,66.720001,66.980003,62.612583,6450700
+2013-09-18,66.669998,67.370003,66.209999,67.110001,62.734100,9865600
+2013-09-19,66.389999,66.500000,65.400002,65.720001,61.434742,11864800
+2013-09-20,65.610001,65.839996,64.910004,65.010002,60.771027,29076200
+2013-09-23,65.040001,65.480003,64.680000,64.750000,60.527981,8286600
+2013-09-24,64.940002,65.160004,64.279999,64.320000,60.126022,8272800
+2013-09-25,64.419998,64.930000,64.199997,64.449997,60.247536,7281400
+2013-09-26,64.360001,65.290001,64.250000,65.239998,60.986027,6971200
+2013-09-27,64.970001,65.440002,64.690002,65.190002,60.939293,5586000
+2013-09-30,64.730003,64.949997,64.389999,64.489998,60.284935,7463200
+2013-10-01,64.370003,65.250000,64.360001,64.830002,60.602764,4881200
+2013-10-02,64.449997,65.339996,64.400002,64.879997,60.649502,7218800
+2013-10-03,64.720001,64.930000,63.830002,64.019997,59.845577,7264100
+2013-10-04,64.019997,65.430000,63.970001,65.300003,61.042126,6546000
+2013-10-07,64.809998,65.309998,64.570000,64.589996,60.378410,5840900
+2013-10-08,64.519997,64.769997,63.970001,64.000000,59.826885,8636300
+2013-10-09,64.019997,64.180000,63.099998,63.590000,59.443619,8746900
+2013-10-10,64.480003,65.669998,64.290001,65.580002,61.303864,7676600
+2013-10-11,65.660004,66.589996,65.500000,66.209999,61.892780,6735000
+2013-10-14,65.879997,66.980003,65.779999,66.830002,62.472355,6461500
+2013-10-15,66.730003,66.940002,65.930000,66.440002,62.107784,5943100
+2013-10-16,66.639999,66.860001,66.129997,66.349998,62.023659,7724800
+2013-10-17,66.190002,66.589996,65.980003,66.419998,62.089100,6952100
+2013-10-18,66.500000,67.269997,65.989998,67.150002,62.771488,7587000
+2013-10-21,67.269997,67.629997,67.160004,67.610001,63.201500,4845800
+2013-10-22,67.940002,69.129997,67.750000,69.000000,64.500854,7860700
+2013-10-23,68.809998,69.199997,68.019997,68.120003,63.678234,5864500
+2013-10-24,68.180000,69.190002,68.019997,69.050003,64.547600,6045400
+2013-10-25,69.320000,69.870003,68.970001,69.260002,64.743904,6613300
+2013-10-28,69.349998,69.480003,68.919998,69.000000,64.500854,4469200
+2013-10-29,69.120003,69.389999,68.779999,68.919998,64.426079,4450200
+2013-10-30,69.220001,69.300003,67.930000,68.470001,64.005417,5816100
+2013-10-31,68.470001,69.300003,68.349998,68.589996,64.117592,5706100
+2013-11-01,68.709999,69.180000,68.500000,69.010002,64.510216,5008600
+2013-11-04,69.410004,69.550003,68.410004,68.809998,64.323257,4790200
+2013-11-05,68.769997,69.059998,68.059998,68.849998,64.360634,5140600
+2013-11-06,69.180000,69.620003,68.620003,69.000000,64.500854,6503200
+2013-11-07,69.339996,69.339996,67.089996,67.150002,62.771488,10496300
+2013-11-08,66.870003,69.410004,66.720001,68.580002,64.108246,16182800
+2013-11-11,68.480003,68.739998,68.190002,68.339996,63.883896,4954000
+2013-11-12,68.330002,68.449997,67.419998,67.769997,63.351067,6586700
+2013-11-13,67.430000,68.989998,67.300003,68.970001,64.472824,5681000
+2013-11-14,69.139999,70.150002,69.000000,70.019997,65.454338,7246600
+2013-11-15,69.769997,70.169998,69.550003,70.000000,65.435654,6184400
+2013-11-18,70.059998,70.089996,69.330002,69.500000,64.968269,4904400
+2013-11-19,69.489998,69.720001,69.040001,69.120003,64.613037,4801700
+2013-11-20,69.120003,69.919998,69.080002,69.230003,64.715874,4726800
+2013-11-21,69.459999,70.010002,69.269997,69.940002,65.379578,4498300
+2013-11-22,69.949997,70.330002,69.800003,70.199997,65.622612,4667500
+2013-11-25,70.540001,70.580002,69.599998,69.739998,65.192604,5830600
+2013-11-26,69.919998,71.690002,69.900002,71.180000,66.538719,8850000
+2013-11-27,71.209999,71.250000,70.459999,70.769997,66.155449,5446100
+2013-11-29,71.040001,71.269997,70.389999,70.540001,65.940445,4801000
+2013-12-02,70.790001,71.309998,70.760002,70.910004,66.286324,6774600
+2013-12-03,70.220001,70.410004,69.379997,69.900002,65.342171,8304700
+2013-12-04,69.419998,70.480003,69.320000,69.970001,65.407600,4883800
+2013-12-05,70.290001,70.680000,70.080002,70.230003,65.650658,5587000
+2013-12-06,70.970001,71.489998,70.769997,71.459999,66.800461,5542000
+2013-12-09,71.360001,71.480003,70.919998,71.110001,66.473289,6477100
+2013-12-10,70.910004,71.650002,70.900002,71.570000,66.903290,7112200
+2013-12-11,71.730003,72.129997,70.430000,70.519997,65.921745,9519200
+2013-12-12,69.279999,70.019997,68.800003,69.629997,65.893364,8756300
+2013-12-13,69.900002,70.129997,69.279999,69.620003,65.883904,5784300
+2013-12-16,69.910004,70.849998,69.910004,70.510002,66.726143,6246600
+2013-12-17,70.690002,70.910004,69.830002,70.660004,66.868088,6829100
+2013-12-18,70.959999,72.199997,70.139999,72.199997,68.325439,7971400
+2013-12-19,72.139999,73.080002,72.099998,72.970001,69.054115,6768100
+2013-12-20,73.099998,73.559998,72.339996,72.400002,68.514702,12989700
+2013-12-23,72.889999,73.330002,72.660004,73.279999,69.347488,5621900
+2013-12-24,73.250000,73.860001,73.250000,73.849998,69.886887,2883600
+2013-12-26,73.949997,74.690002,73.910004,74.620003,70.615578,4620100
+2013-12-27,74.650002,74.779999,73.930000,74.349998,70.360054,3844800
+2013-12-30,74.989998,76.540001,74.889999,76.230003,72.139183,8578900
+2013-12-31,76.279999,76.500000,75.809998,76.400002,72.300056,4692300
+2014-01-02,76.040001,76.500000,75.900002,76.269997,72.177032,5200900
+2014-01-03,76.300003,76.699997,76.010002,76.110001,72.025612,4060700
+2014-01-06,76.529999,76.839996,75.489998,75.820000,71.751183,6816200
+2014-01-07,76.120003,76.699997,75.900002,76.339996,72.243271,4494900
+2014-01-08,76.129997,76.279999,74.809998,75.220001,71.183372,10900300
+2014-01-09,75.459999,75.459999,74.320000,74.900002,70.880547,8052900
+2014-01-10,75.080002,75.510002,74.559998,75.389999,71.344254,6219000
+2014-01-13,74.900002,75.129997,73.050003,73.269997,69.338013,8232800
+2014-01-14,73.709999,74.580002,73.290001,74.449997,70.454697,5996500
+2014-01-15,74.300003,75.099998,74.250000,74.279999,70.293816,7283900
+2014-01-16,74.389999,74.400002,73.720001,74.209999,70.227577,5597400
+2014-01-17,74.430000,74.629997,73.680000,73.980003,70.009918,6624800
+2014-01-21,73.989998,74.879997,73.300003,74.199997,70.218109,7507100
+2014-01-22,74.080002,75.430000,74.050003,75.309998,71.268539,6795100
+2014-01-23,74.529999,74.949997,74.150002,74.790001,70.776459,6280900
+2014-01-24,74.099998,74.230003,72.720001,72.720001,68.817528,7334900
+2014-01-27,72.470001,72.910004,71.720001,72.250000,68.372765,7415100
+2014-01-28,72.260002,73.000000,72.239998,72.879997,68.968948,5508400
+2014-01-29,71.970001,72.239998,71.120003,71.330002,67.502129,8604800
+2014-01-30,72.089996,73.629997,72.000000,73.220001,69.290703,7550500
+2014-01-31,72.199997,73.320000,71.800003,72.610001,68.713440,7748500
+2014-02-03,72.660004,72.730003,69.849998,69.989998,66.234039,11964800
+2014-02-04,70.940002,71.250000,69.879997,71.050003,67.237160,11278800
+2014-02-05,72.050003,72.050003,70.370003,71.760002,67.909058,11573300
+2014-02-06,75.610001,76.730003,74.779999,75.559998,71.505135,19821900
+2014-02-07,75.080002,75.699997,74.610001,75.669998,71.609215,11480100
+2014-02-10,75.669998,77.309998,75.570000,77.059998,72.924629,8781600
+2014-02-11,77.150002,77.919998,77.110001,77.790001,73.615456,8186200
+2014-02-12,77.660004,78.010002,77.639999,77.910004,73.729027,6329500
+2014-02-13,77.519997,78.239998,77.199997,77.900002,73.719551,5955400
+2014-02-14,77.879997,79.470001,77.769997,79.230003,74.978180,7001100
+2014-02-18,79.500000,80.000000,79.430000,79.580002,75.309418,6136500
+2014-02-19,79.500000,79.779999,78.800003,78.870003,74.637505,7024000
+2014-02-20,79.209999,79.650002,78.839996,79.190002,74.940323,5748700
+2014-02-21,79.070000,80.449997,78.800003,80.129997,75.829887,8252300
+2014-02-24,80.500000,81.589996,80.459999,80.730003,76.397682,6635200
+2014-02-25,80.720001,81.349998,80.040001,80.209999,75.905586,6064300
+2014-02-26,80.629997,80.949997,79.589996,80.080002,75.782570,5775600
+2014-02-27,80.000000,80.589996,79.860001,80.480003,76.161102,4919800
+2014-02-28,80.839996,81.589996,80.169998,80.809998,76.473389,7030700
+2014-03-03,80.300003,80.650002,78.849998,79.459999,75.195839,8449400
+2014-03-04,80.599998,82.169998,80.540001,81.709999,77.325104,8021400
+2014-03-05,82.220001,83.209999,82.019997,82.680000,78.243042,7709700
+2014-03-06,82.860001,83.419998,82.839996,83.339996,78.867615,5774400
+2014-03-07,83.599998,83.650002,82.019997,82.209999,77.798264,7478100
+2014-03-10,82.180000,82.430000,81.690002,81.970001,77.571136,5209700
+2014-03-11,82.190002,82.330002,80.720001,81.050003,76.700516,7171600
+2014-03-12,80.589996,81.389999,80.050003,81.379997,77.012802,6378300
+2014-03-13,81.750000,81.980003,79.660004,79.930000,75.640625,8570600
+2014-03-14,79.849998,80.410004,79.570000,80.070000,75.773109,7754700
+2014-03-17,80.500000,81.599998,80.309998,81.389999,77.022263,7280700
+2014-03-18,81.529999,82.250000,81.279999,81.989998,77.590065,5395900
+2014-03-19,82.250000,82.300003,80.040001,80.519997,76.198944,6403200
+2014-03-20,80.589996,81.019997,79.760002,80.809998,76.473389,5968200
+2014-03-21,81.959999,82.050003,80.050003,80.349998,76.038086,7840900
+2014-03-24,80.709999,80.930000,79.379997,79.489998,75.224228,7642900
+2014-03-25,80.239998,80.339996,78.599998,79.550003,75.281006,6692400
+2014-03-26,79.930000,80.239998,78.610001,78.620003,74.400925,6750800
+2014-03-27,78.300003,78.610001,77.279999,78.480003,74.268425,9194700
+2014-03-28,78.730003,79.459999,78.309998,78.989998,74.751053,5281700
+2014-03-31,79.699997,80.389999,79.660004,80.070000,75.773109,6848800
+2014-04-01,80.389999,81.610001,80.389999,81.570000,77.192612,6555000
+2014-04-02,81.720001,81.970001,81.300003,81.669998,77.287247,6073500
+2014-04-03,81.919998,82.239998,81.139999,81.690002,77.306168,4930100
+2014-04-04,82.320000,82.849998,80.400002,80.430000,76.113785,8894400
+2014-04-07,80.419998,80.800003,78.559998,79.129997,74.883553,9000800
+2014-04-08,79.269997,79.650002,78.120003,79.570000,75.299934,7085100
+2014-04-09,79.970001,80.529999,79.260002,80.470001,76.151649,7004800
+2014-04-10,80.919998,80.919998,77.480003,77.510002,73.350479,10160200
+2014-04-11,77.510002,77.949997,76.809998,77.010002,72.877312,9204100
+2014-04-14,77.699997,77.830002,76.849998,77.620003,73.454590,5964700
+2014-04-15,77.889999,78.209999,76.309998,77.660004,73.492432,6398700
+2014-04-16,78.379997,78.980003,78.190002,78.949997,74.713203,5958800
+2014-04-17,78.699997,80.220001,78.599998,79.989998,75.697403,7627700
+2014-04-21,79.959999,79.980003,78.690002,79.110001,74.864632,7080300
+2014-04-22,79.220001,79.889999,79.209999,79.449997,75.186378,5876000
+2014-04-23,79.529999,79.949997,78.970001,79.519997,75.252617,4148600
+2014-04-24,79.739998,80.059998,79.110001,79.589996,75.318855,4632700
+2014-04-25,79.459999,79.459999,78.160004,78.230003,74.031845,6193300
+2014-04-28,78.760002,79.059998,76.879997,77.779999,73.606003,7522700
+2014-04-29,78.349998,78.680000,77.739998,78.639999,74.419846,4608700
+2014-04-30,78.500000,79.529999,78.180000,79.339996,75.082268,6550200
+2014-05-01,79.349998,79.870003,79.260002,79.559998,75.290474,6486800
+2014-05-02,79.620003,80.550003,79.620003,80.309998,76.000221,7599300
+2014-05-05,80.419998,81.309998,79.800003,81.220001,76.861389,5669800
+2014-05-06,81.589996,81.629997,80.800003,81.029999,76.681587,10696500
+2014-05-07,81.650002,82.440002,80.120003,80.290001,75.981293,11906800
+2014-05-08,80.209999,82.269997,80.070000,81.599998,77.221001,9257700
+2014-05-09,81.269997,82.019997,80.900002,81.949997,77.552208,5376600
+2014-05-12,82.080002,82.940002,82.080002,82.419998,77.996994,5704400
+2014-05-13,82.430000,82.750000,81.750000,82.080002,77.675247,5274700
+2014-05-14,81.769997,82.180000,80.629997,80.919998,76.577484,6192000
+2014-05-15,80.639999,80.830002,79.209999,80.150002,75.848808,7687700
+2014-05-16,80.169998,80.419998,79.339996,80.389999,76.075935,5823700
+2014-05-19,80.430000,81.379997,80.139999,81.050003,76.700516,4896900
+2014-05-20,80.940002,81.760002,80.519997,81.089996,76.738365,6023200
+2014-05-21,81.419998,82.339996,81.419998,82.190002,77.779343,4736400
+2014-05-22,82.190002,82.790001,82.040001,82.349998,77.930748,4652000
+2014-05-23,82.190002,83.360001,82.190002,83.320000,78.848694,4580300
+2014-05-27,83.559998,83.980003,83.320000,83.739998,79.246147,5197700
+2014-05-28,83.800003,83.900002,83.250000,83.599998,79.113670,4652100
+2014-05-29,83.919998,84.089996,83.430000,84.029999,79.520584,6522000
+2014-05-30,83.809998,84.389999,83.620003,84.010002,79.501671,6352400
+2014-06-02,84.269997,84.419998,84.010002,84.269997,79.747719,3933800
+2014-06-03,84.070000,84.230003,83.720001,83.879997,79.378639,4541400
+2014-06-04,83.849998,84.400002,83.309998,84.239998,79.719322,5800100
+2014-06-05,84.370003,84.949997,83.900002,84.779999,80.230347,5956000
+2014-06-06,84.910004,84.989998,84.269997,84.610001,80.069473,5302800
+2014-06-09,84.599998,85.860001,84.529999,85.480003,80.892784,5667600
+2014-06-10,85.400002,85.400002,84.699997,84.750000,80.201965,4632600
+2014-06-11,84.599998,84.809998,84.169998,84.309998,79.785568,4300900
+2014-06-12,84.029999,84.199997,82.599998,82.800003,78.356613,7231400
+2014-06-13,82.750000,82.989998,81.800003,82.800003,78.356613,4711100
+2014-06-16,82.639999,83.440002,82.559998,83.300003,78.829773,5181100
+2014-06-17,82.980003,83.459999,82.610001,83.160004,78.697289,4535900
+2014-06-18,83.180000,83.750000,82.809998,83.580002,79.094742,4536300
+2014-06-19,83.650002,83.919998,83.250000,83.769997,79.274544,4326900
+2014-06-20,84.070000,84.080002,82.790001,82.820000,78.375526,11073400
+2014-06-23,82.849998,82.930000,82.320000,82.760002,78.318748,3946600
+2014-06-24,82.680000,83.570000,82.489998,82.680000,78.243042,5057100
+2014-06-25,82.669998,84.320000,82.480003,83.900002,79.397568,7899200
+2014-06-26,84.680000,84.680000,83.769997,84.449997,79.918053,6424000
+2014-06-27,84.129997,85.389999,84.129997,85.300003,80.722443,17932200
+2014-06-30,85.440002,86.070000,85.070000,85.739998,81.138832,6991400
+2014-07-01,85.809998,86.870003,85.760002,86.470001,81.829659,5608200
+2014-07-02,86.580002,86.709999,86.199997,86.440002,81.801270,4439600
+2014-07-03,86.699997,86.980003,86.500000,86.839996,82.179794,3210400
+2014-07-07,86.610001,86.820000,86.320000,86.589996,81.943207,4897800
+2014-07-08,86.190002,86.320000,85.680000,85.860001,81.252396,4733600
+2014-07-09,86.000000,87.290001,85.849998,87.220001,82.539413,6311200
+2014-07-10,86.529999,87.610001,86.190002,86.860001,82.198723,7249700
+2014-07-11,87.000000,87.360001,86.370003,86.889999,82.227112,5016000
+2014-07-14,87.459999,87.489998,86.660004,86.760002,82.104088,4012300
+2014-07-15,86.580002,86.650002,85.540001,86.150002,81.526825,7001300
+2014-07-16,86.690002,87.629997,85.120003,85.349998,80.769760,10667200
+2014-07-17,85.330002,86.190002,84.870003,85.029999,80.466927,6885900
+2014-07-18,85.440002,85.980003,85.169998,85.809998,81.205070,4719100
+2014-07-21,85.809998,86.000000,85.500000,85.739998,81.138832,4799800
+2014-07-22,86.370003,86.559998,85.510002,86.209999,81.583603,4984200
+2014-07-23,86.309998,86.529999,85.760002,86.040001,81.422729,3650200
+2014-07-24,86.500000,86.959999,86.309998,86.800003,82.141945,4168200
+2014-07-25,86.300003,86.699997,86.050003,86.230003,81.602531,4659800
+2014-07-28,86.430000,87.220001,85.809998,87.150002,82.473160,4546800
+2014-07-29,87.290001,87.300003,86.160004,86.199997,81.574142,5478700
+2014-07-30,86.620003,87.269997,86.089996,87.209999,82.529938,7294200
+2014-07-31,86.589996,86.949997,85.690002,85.879997,81.271324,6342400
+2014-08-01,85.389999,86.010002,85.209999,85.379997,80.798157,6759200
+2014-08-04,86.169998,87.519997,86.010002,87.239998,82.558327,8262700
+2014-08-05,87.139999,87.239998,86.019997,86.750000,82.094627,9361800
+2014-08-06,86.180000,87.620003,86.169998,86.589996,81.943207,9789500
+2014-08-07,86.989998,87.010002,85.230003,85.510002,80.921181,7916900
+2014-08-08,85.650002,86.889999,85.620003,86.849998,82.189255,6382700
+2014-08-11,87.070000,87.980003,87.050003,87.500000,82.804382,5047900
+2014-08-12,87.150002,87.620003,86.779999,87.209999,82.529938,4510100
+2014-08-13,87.739998,87.949997,87.419998,87.599998,82.899017,4532300
+2014-08-14,88.029999,88.910004,87.809998,88.610001,83.854813,8218000
+2014-08-15,89.019997,89.470001,88.690002,89.279999,84.488846,8211000
+2014-08-18,89.660004,90.370003,89.580002,89.970001,85.141830,4973300
+2014-08-19,90.129997,90.290001,89.639999,90.089996,85.255402,4153000
+2014-08-20,89.919998,90.050003,89.669998,89.839996,85.018799,3894500
+2014-08-21,90.059998,90.529999,89.980003,90.379997,85.529816,4719100
+2014-08-22,90.129997,90.900002,90.120003,90.489998,85.633926,4756400
+2014-08-25,90.949997,91.139999,90.300003,90.379997,85.529816,3515400
+2014-08-26,90.610001,90.669998,89.940002,90.019997,85.189156,4117700
+2014-08-27,90.139999,90.410004,89.980003,90.370003,85.520363,3739700
+2014-08-28,89.980003,90.489998,89.959999,90.230003,85.387871,2678400
+2014-08-29,90.320000,90.379997,89.330002,89.879997,85.056648,4179400
+2014-09-02,90.180000,90.930000,90.150002,90.800003,85.927292,5602000
+2014-09-03,91.059998,91.160004,90.669998,90.940002,86.059776,3684000
+2014-09-04,90.870003,91.199997,90.010002,90.139999,85.302704,5458700
+2014-09-05,90.139999,90.940002,90.099998,90.940002,86.059776,4597100
+2014-09-08,90.760002,90.839996,90.160004,90.559998,85.700150,4331200
+2014-09-09,90.500000,90.540001,89.510002,89.599998,84.791679,4797700
+2014-09-10,89.540001,89.820000,88.750000,89.519997,84.715981,5078600
+2014-09-11,89.199997,90.040001,89.059998,89.970001,85.141830,4065500
+2014-09-12,90.099998,90.199997,89.250000,89.669998,84.857918,3870200
+2014-09-15,89.739998,90.370003,89.559998,90.080002,85.245934,4910000
+2014-09-16,89.709999,90.430000,89.459999,90.269997,85.425720,4324600
+2014-09-17,90.580002,90.769997,89.970001,90.330002,85.482513,4058100
+2014-09-18,90.309998,90.500000,89.910004,90.339996,85.491966,4343600
+2014-09-19,90.800003,91.150002,90.389999,90.489998,85.633926,13382900
+2014-09-22,90.489998,90.610001,89.190002,89.290001,84.498322,5003000
+2014-09-23,89.120003,89.239998,88.230003,88.309998,83.570915,6155800
+2014-09-24,88.360001,89.639999,88.239998,89.449997,84.649727,5607000
+2014-09-25,89.260002,89.599998,88.059998,88.070000,83.343781,7077400
+2014-09-26,88.150002,88.889999,87.910004,88.739998,83.977829,5786400
+2014-09-29,88.120003,89.050003,87.360001,88.830002,84.063004,5188600
+2014-09-30,88.839996,89.540001,88.540001,89.029999,84.252274,4824500
+2014-10-01,89.080002,89.239998,87.199997,87.489998,82.794922,7512500
+2014-10-02,87.500000,87.660004,84.650002,86.790001,82.132477,11051100
+2014-10-03,88.099998,88.900002,87.550003,88.449997,83.703400,7549000
+2014-10-06,88.790001,89.180000,88.070000,88.559998,83.807487,4047800
+2014-10-07,88.000000,88.260002,87.040001,87.110001,82.435303,6217600
+2014-10-08,87.059998,88.150002,86.330002,88.110001,83.381645,7008000
+2014-10-09,88.029999,88.160004,85.570000,85.709999,81.110435,8726200
+2014-10-10,85.919998,87.000000,85.400002,86.269997,81.640388,12675300
+2014-10-13,85.919998,86.389999,83.760002,83.910004,79.407036,9039800
+2014-10-14,84.750000,85.519997,83.900002,84.139999,79.624687,8607100
+2014-10-15,82.389999,83.760002,78.540001,82.080002,77.675247,21844400
+2014-10-16,79.930000,82.470001,79.550003,81.739998,77.353477,13216500
+2014-10-17,82.680000,84.660004,82.680000,83.830002,79.331322,12381800
+2014-10-20,84.230003,85.610001,84.150002,85.519997,80.930626,8302100
+2014-10-21,86.320000,87.680000,86.250000,87.540001,82.842232,9170400
+2014-10-22,87.940002,88.610001,86.980003,87.099998,82.425842,9314500
+2014-10-23,88.209999,88.629997,87.760002,87.989998,83.268082,7245300
+2014-10-24,87.910004,88.660004,87.580002,88.610001,83.854813,4937500
+2014-10-27,88.470001,88.989998,88.239998,88.449997,83.703400,5644300
+2014-10-28,89.040001,89.959999,88.949997,89.930000,85.103973,4554400
+2014-10-29,89.839996,90.250000,88.949997,89.529999,84.725441,5442100
+2014-10-30,89.440002,90.540001,88.910004,90.220001,85.378418,4445300
+2014-10-31,91.510002,91.980003,90.750000,91.379997,86.476158,7472200
+2014-11-03,91.239998,91.820000,91.000000,91.709999,86.788460,5447700
+2014-11-04,91.970001,91.980003,89.800003,90.349998,85.501442,7928500
+2014-11-05,91.599998,91.599998,89.589996,91.000000,86.116554,7021000
+2014-11-06,91.290001,92.000000,91.150002,92.000000,87.062889,9938800
+2014-11-07,89.389999,90.019997,88.650002,90.000000,85.170227,16176300
+2014-11-10,90.239998,90.419998,88.949997,89.779999,84.962021,8155700
+2014-11-11,90.010002,90.029999,89.129997,89.980003,85.151291,5896800
+2014-11-12,89.769997,90.120003,89.330002,89.900002,85.075577,5262500
+2014-11-13,90.099998,90.709999,90.029999,90.489998,85.633926,5098800
+2014-11-14,90.680000,90.849998,90.379997,90.800003,85.927292,4281100
+2014-11-17,90.500000,90.779999,90.050003,90.410004,85.558220,4148400
+2014-11-18,90.330002,90.690002,90.129997,90.279999,85.435188,4461600
+2014-11-19,90.260002,90.279999,89.190002,89.820000,84.999886,7710000
+2014-11-20,89.250000,89.459999,88.750000,88.900002,84.129250,8604000
+2014-11-21,89.690002,89.800003,88.760002,88.959999,84.186035,7383500
+2014-11-24,89.320000,90.660004,89.320000,90.580002,85.719093,6504100
+2014-11-25,90.709999,91.919998,90.709999,91.650002,86.731667,7404200
+2014-11-26,91.959999,92.279999,91.669998,91.919998,86.987183,4607200
+2014-11-28,92.529999,92.959999,92.230003,92.510002,87.545525,3853000
+2014-12-01,92.629997,93.139999,92.099998,92.699997,87.725327,6714700
+2014-12-02,92.650002,93.809998,92.419998,93.470001,88.454002,6664200
+2014-12-03,93.540001,93.660004,92.500000,93.110001,88.113319,5492400
+2014-12-04,93.610001,93.629997,92.570000,93.230003,88.226883,6017300
+2014-12-05,93.690002,93.980003,93.339996,93.760002,88.728447,5226800
+2014-12-08,94.209999,94.500000,93.410004,93.800003,88.766312,6682200
+2014-12-09,93.510002,93.570000,91.760002,92.940002,87.952454,9519900
+2014-12-10,92.519997,92.830002,91.550003,91.629997,86.712746,10346100
+2014-12-11,91.089996,92.660004,90.660004,91.750000,87.929871,8227800
+2014-12-12,91.180000,92.610001,90.949997,91.489998,87.680695,7890900
+2014-12-15,92.029999,92.690002,90.190002,90.900002,87.115257,6746800
+2014-12-16,90.500000,92.000000,90.129997,90.169998,86.415649,6549400
+2014-12-17,90.360001,91.680000,90.199997,91.379997,87.575272,6885900
+2014-12-18,92.190002,92.610001,91.860001,92.610001,88.754059,9387500
+2014-12-19,92.339996,93.199997,91.879997,92.889999,89.022400,12837900
+2014-12-22,93.389999,94.360001,93.220001,94.199997,90.277855,7477500
+2014-12-23,94.480003,95.070000,94.400002,94.690002,90.747459,5385800
+2014-12-24,94.690002,95.120003,94.370003,94.419998,90.488693,2165700
+2014-12-26,94.739998,95.309998,94.519997,95.029999,91.073296,3142400
+2014-12-29,94.739998,95.930000,94.690002,95.500000,91.523727,3584400
+2014-12-30,95.209999,95.500000,94.500000,94.699997,90.757042,3490100
+2014-12-31,95.129997,95.519997,94.029999,94.190002,90.268272,4797000
+2015-01-02,94.910004,95.279999,92.849998,93.750000,89.846596,5865400
+2015-01-05,93.279999,93.349998,91.779999,92.379997,88.533630,7789400
+2015-01-06,92.570000,93.190002,91.160004,91.889999,88.064034,6793100
+2015-01-07,92.720001,93.150002,92.099998,92.830002,88.964905,6589500
+2015-01-08,93.870003,94.300003,93.559998,93.790001,89.884926,7579400
+2015-01-09,94.050003,95.269997,93.989998,94.250000,90.325775,8603600
+2015-01-12,94.690002,94.760002,93.769997,94.459999,90.527031,6586200
+2015-01-13,95.279999,96.430000,94.699997,95.190002,91.226639,9354400
+2015-01-14,94.000000,94.800003,93.290001,94.230003,90.306618,6509300
+2015-01-15,94.230003,94.959999,94.040001,94.349998,90.421623,5662200
+2015-01-16,94.000000,95.230003,93.980003,95.180000,91.217056,6117000
+2015-01-20,95.220001,95.389999,93.750000,94.739998,90.795364,7940600
+2015-01-21,94.370003,94.739998,93.820000,94.169998,90.249107,7474700
+2015-01-22,94.580002,95.389999,94.010002,95.150002,91.188301,7014200
+2015-01-23,94.919998,95.529999,94.430000,94.720001,90.776207,4655900
+2015-01-26,94.449997,95.000000,93.930000,94.970001,91.015808,5446900
+2015-01-27,93.940002,94.739998,93.360001,93.970001,90.057449,5390000
+2015-01-28,94.139999,94.470001,92.610001,92.669998,88.811562,6198400
+2015-01-29,93.110001,93.500000,91.519997,93.220001,89.338661,7681700
+2015-01-30,92.500000,92.940002,90.830002,90.959999,87.172760,10212300
+2015-02-02,91.300003,92.050003,90.059998,91.930000,88.102371,9392000
+2015-02-03,92.430000,94.110001,92.190002,94.099998,90.182022,11393800
+2015-02-04,99.389999,101.940002,98.820000,101.279999,97.063072,32640500
+2015-02-05,100.760002,102.839996,100.510002,102.639999,98.366455,14089500
+2015-02-06,102.669998,102.989998,101.650002,102.019997,97.772255,8475600
+2015-02-09,101.800003,102.500000,101.449997,101.730003,97.494339,5461800
+2015-02-10,101.959999,102.190002,101.180000,101.919998,97.676430,5726300
+2015-02-11,102.010002,102.019997,101.070000,101.870003,97.628510,5868200
+2015-02-12,102.160004,103.849998,101.699997,103.580002,99.267319,6816800
+2015-02-13,103.730003,104.410004,103.430000,104.169998,99.832741,5175600
+2015-02-17,104.230003,104.449997,103.570000,104.120003,99.784828,4759300
+2015-02-18,103.400002,104.330002,103.400002,103.860001,99.535652,3740100
+2015-02-19,104.000000,104.080002,103.550003,103.889999,99.564400,5094100
+2015-02-20,103.860001,104.690002,102.730003,104.550003,100.196922,5197000
+2015-02-23,104.760002,105.230003,104.440002,104.989998,100.618599,5587100
+2015-02-24,104.540001,105.050003,104.349998,104.669998,100.311920,4098800
+2015-02-25,104.680000,105.980003,104.669998,105.570000,101.174454,6547200
+2015-02-26,105.500000,105.709999,104.139999,104.559998,100.206497,5049000
+2015-02-27,104.669998,105.139999,103.959999,104.080002,99.746490,4161600
+2015-03-02,104.349998,105.980003,104.349998,105.889999,101.481125,5635500
+2015-03-03,105.779999,106.639999,105.209999,106.349998,101.921982,4671900
+2015-03-04,105.860001,106.000000,104.779999,105.570000,101.174454,4972300
+2015-03-05,105.540001,105.809998,104.870003,105.029999,100.656937,3954700
+2015-03-06,104.669998,105.000000,103.489998,103.820000,99.497314,5704200
+2015-03-09,103.940002,105.419998,103.820000,105.260002,100.877373,4868900
+2015-03-10,104.269997,104.349998,103.089996,103.089996,98.797707,6462100
+2015-03-11,103.510002,104.000000,102.870003,102.889999,98.606041,4831900
+2015-03-12,103.849998,107.330002,103.809998,107.169998,102.707825,10730400
+2015-03-13,107.279999,107.279999,105.519997,106.440002,102.008232,8024200
+2015-03-16,106.839996,107.730003,106.050003,107.370003,102.899513,6407300
+2015-03-17,107.000000,107.209999,106.150002,106.959999,102.506577,4489400
+2015-03-18,106.900002,108.419998,105.730003,107.970001,103.474533,5517500
+2015-03-19,107.839996,108.480003,107.139999,107.370003,102.899513,4885600
+2015-03-20,107.830002,108.940002,107.500000,108.430000,103.915367,8710100
+2015-03-23,108.599998,108.849998,108.040001,108.220001,103.714119,5176100
+2015-03-24,107.720001,108.050003,107.080002,107.110001,102.650337,4493500
+2015-03-25,107.449997,107.459999,105.000000,105.000000,100.628189,6964700
+2015-03-26,104.470001,105.820000,104.099998,105.239998,100.858185,5915800
+2015-03-27,105.199997,106.449997,104.879997,105.480003,101.088203,5320600
+2015-03-30,106.059998,106.949997,106.050003,106.120003,101.701553,3831900
+2015-03-31,105.599998,106.320000,104.889999,104.889999,100.522766,5701600
+2015-04-01,105.430000,106.000000,104.250000,105.440002,101.049866,6106000
+2015-04-02,105.489998,106.239998,104.860001,106.000000,101.586540,3651600
+2015-04-06,105.309998,106.570000,105.040001,105.629997,101.231949,4153500
+2015-04-07,105.800003,106.410004,105.400002,105.430000,101.040283,4694600
+2015-04-08,105.230003,106.690002,105.160004,106.360001,101.931557,4277400
+2015-04-09,106.510002,107.000000,105.879997,106.769997,102.324478,3924400
+2015-04-10,106.730003,107.099998,106.410004,106.949997,102.496994,3251500
+2015-04-13,107.070000,107.900002,106.470001,106.510002,102.075310,4201500
+2015-04-14,106.410004,107.300003,106.110001,106.660004,102.219078,5387700
+2015-04-15,106.919998,107.430000,106.860001,106.980003,102.525749,4307300
+2015-04-16,107.150002,108.300003,106.900002,108.099998,103.599106,6379100
+2015-04-17,107.800003,107.800003,106.040001,106.690002,102.247818,9663500
+2015-04-20,107.830002,108.500000,107.320000,108.220001,103.714119,8085800
+2015-04-21,108.930000,108.989998,107.580002,107.680000,103.196609,4579400
+2015-04-22,108.019997,108.250000,106.970001,107.940002,103.445778,4060500
+2015-04-23,107.959999,109.900002,107.610001,109.000000,104.461632,5983800
+2015-04-24,109.529999,110.480003,109.190002,109.529999,104.969566,6355600
+2015-04-27,111.500000,111.660004,110.070000,110.160004,105.573357,8479100
+2015-04-28,110.750000,111.150002,108.889999,109.919998,105.343338,7324500
+2015-04-29,109.660004,110.379997,109.260002,109.809998,105.237907,6024700
+2015-04-30,109.680000,110.190002,107.680000,108.720001,104.193298,7770300
+2015-05-01,109.949997,110.669998,109.269997,110.519997,105.918358,6205100
+2015-05-04,111.480003,111.529999,110.500000,111.029999,106.407112,7566200
+2015-05-05,113.269997,113.300003,110.559998,110.809998,106.196274,14619300
+2015-05-06,111.559998,111.959999,109.220001,109.720001,105.151665,8975300
+2015-05-07,109.599998,109.779999,108.720001,109.260002,104.710823,8291200
+2015-05-08,110.260002,110.699997,109.570000,110.110001,105.525429,6260600
+2015-05-11,110.139999,110.139999,108.489998,108.599998,104.078285,6728500
+2015-05-12,108.599998,109.660004,107.669998,109.239998,104.691635,6345000
+2015-05-13,109.699997,109.870003,108.779999,109.190002,104.643738,4141400
+2015-05-14,109.690002,110.000000,109.010002,109.930000,105.352913,4662000
+2015-05-15,110.099998,110.690002,109.680000,110.300003,105.707512,5852200
+2015-05-18,110.470001,110.739998,110.010002,110.330002,105.736259,4863600
+2015-05-19,110.690002,110.989998,110.470001,110.559998,105.956688,4549600
+2015-05-20,110.779999,110.809998,110.019997,110.199997,105.611671,4393500
+2015-05-21,110.040001,110.639999,109.900002,110.389999,105.793762,4880600
+2015-05-22,110.300003,110.900002,110.120003,110.260002,105.669189,4267700
+2015-05-26,110.169998,110.849998,108.760002,109.440002,104.883316,5970800
+2015-05-27,109.510002,110.610001,109.300003,110.370003,105.774597,3984200
+2015-05-28,110.110001,110.540001,109.599998,110.529999,105.927940,3392000
+2015-05-29,110.330002,110.519997,109.410004,110.370003,105.774597,5253900
+2015-06-01,111.480003,111.769997,110.550003,110.959999,106.340034,6778900
+2015-06-02,110.919998,111.250000,110.019997,110.750000,106.138779,4410100
+2015-06-03,110.750000,111.820000,110.500000,111.169998,106.541290,4510300
+2015-06-04,110.949997,111.250000,109.989998,110.300003,105.707512,5748200
+2015-06-05,110.300003,110.769997,109.800003,110.300003,105.707512,4323500
+2015-06-08,110.029999,110.239998,109.209999,109.290001,104.739563,6062300
+2015-06-09,109.349998,109.410004,107.650002,108.519997,104.001633,7711800
+2015-06-10,108.900002,110.250000,108.730003,110.000000,105.419998,5128700
+2015-06-11,110.010002,110.959999,110.010002,110.620003,106.014191,5036300
+2015-06-12,110.330002,110.529999,109.540001,109.949997,105.372086,4009900
+2015-06-15,109.269997,110.410004,108.930000,110.180000,105.592506,5742200
+2015-06-16,110.349998,111.290001,109.900002,111.059998,106.435875,3848300
+2015-06-17,111.199997,111.940002,110.879997,111.489998,106.847969,4732300
+2015-06-18,111.830002,113.550003,111.580002,113.220001,108.505936,6929900
+2015-06-19,112.830002,113.480003,112.400002,112.620003,107.930923,9587700
+2015-06-22,113.400002,114.470001,113.269997,113.529999,108.803017,5906200
+2015-06-23,113.930000,114.519997,113.720001,114.410004,109.646385,6003100
+2015-06-24,114.300003,114.559998,113.709999,113.769997,109.033035,5636200
+2015-06-25,114.620003,115.279999,114.389999,114.449997,109.684731,6213500
+2015-06-26,114.970001,115.180000,114.410004,114.989998,110.202240,9509600
+2015-06-29,114.160004,115.190002,113.000000,113.050003,108.343010,7618200
+2015-06-30,113.930000,114.860001,113.550003,114.139999,109.387634,6946700
+2015-07-01,114.949997,115.250000,114.339996,115.129997,110.978127,5435400
+2015-07-02,115.379997,115.669998,114.449997,114.970001,110.823891,5549500
+2015-07-06,114.470001,115.860001,114.269997,115.699997,111.527565,5880500
+2015-07-07,116.419998,117.190002,114.860001,117.099998,112.877083,8359900
+2015-07-08,116.500000,116.680000,115.110001,115.190002,111.035965,8445200
+2015-07-09,116.830002,117.209999,115.510002,115.599998,111.431168,7016800
+2015-07-10,117.050003,117.430000,115.669998,116.440002,112.240883,7020700
+2015-07-13,117.750000,118.309998,117.250000,118.050003,113.792831,7595200
+2015-07-14,118.029999,118.139999,117.699997,117.849998,113.600029,5061700
+2015-07-15,118.040001,118.900002,117.849998,118.300003,114.033813,4845200
+2015-07-16,119.000000,119.150002,118.540001,119.070000,114.776031,6303000
+2015-07-17,118.690002,118.900002,118.029999,118.860001,114.573616,5109900
+2015-07-20,119.290001,119.900002,118.889999,119.580002,115.267647,6611400
+2015-07-21,119.760002,119.800003,118.830002,119.309998,115.007385,4785200
+2015-07-22,119.279999,119.699997,118.930000,119.330002,115.026665,4714100
+2015-07-23,119.300003,119.669998,118.570000,118.800003,114.515785,3347200
+2015-07-24,119.349998,119.800003,118.580002,118.910004,114.621811,4320900
+2015-07-27,118.629997,118.800003,117.779999,118.250000,113.985611,5179300
+2015-07-28,118.680000,118.900002,117.870003,118.459999,114.188026,6447300
+2015-07-29,118.849998,119.860001,118.410004,119.839996,115.518272,6083500
+2015-07-30,119.709999,120.349998,119.010002,120.029999,115.701424,4430100
+2015-07-31,120.120003,120.720001,119.599998,120.000000,115.672501,5767300
+2015-08-03,120.879997,121.730003,120.169998,121.120003,116.752121,8426900
+2015-08-04,121.500000,122.080002,120.610001,121.690002,117.301552,12624400
+2015-08-05,110.830002,113.949997,109.500000,110.529999,106.544014,61011200
+2015-08-06,110.400002,110.400002,104.239998,108.550003,104.635429,57175200
+2015-08-07,108.750000,109.559998,107.660004,109.349998,105.406570,15791600
+2015-08-10,110.000000,111.000000,109.739998,111.000000,106.997055,11617000
+2015-08-11,110.339996,110.489998,107.949997,108.000000,104.105263,13528500
+2015-08-12,107.000000,107.440002,105.510002,106.989998,103.131668,16923500
+2015-08-13,107.209999,108.510002,106.750000,107.519997,103.642555,9754500
+2015-08-14,107.610001,107.669998,106.519997,107.160004,103.295547,7088700
+2015-08-17,107.300003,109.279999,106.839996,109.050003,105.117378,7954000
+2015-08-18,108.040001,108.250000,106.800003,106.940002,103.083481,12021500
+2015-08-19,106.580002,107.750000,105.769997,106.449997,102.611137,9484400
+2015-08-20,104.339996,105.000000,99.760002,100.019997,96.413033,34651300
+2015-08-21,97.500000,100.629997,96.610001,98.839996,95.275581,30875700
+2015-08-24,93.379997,100.000000,90.000000,95.360001,91.921097,30159200
+2015-08-25,99.730003,100.089996,95.720001,95.889999,92.431976,19079400
+2015-08-26,98.690002,99.489998,96.260002,99.230003,95.651527,13755900
+2015-08-27,101.349998,102.620003,99.779999,102.169998,98.485497,17532200
+2015-08-28,102.190002,103.339996,101.809998,102.480003,98.784317,11005200
+2015-08-31,102.300003,102.459999,100.910004,101.879997,98.205948,8473600
+2015-09-01,99.309998,101.339996,99.160004,99.510002,95.921432,14212600
+2015-09-02,100.919998,101.940002,99.550003,101.889999,98.215591,12148200
+2015-09-03,102.199997,103.029999,101.459999,101.989998,98.311981,8953300
+2015-09-04,100.959999,101.820000,100.360001,100.970001,97.328766,9213800
+2015-09-08,102.949997,104.150002,102.519997,104.010002,100.259140,8571500
+2015-09-09,104.750000,104.949997,101.680000,101.910004,98.234879,9495600
+2015-09-10,101.849998,103.220001,101.330002,102.599998,98.899979,9026100
+2015-09-11,102.349998,104.500000,102.199997,104.480003,100.712196,9529800
+2015-09-14,104.650002,104.900002,102.989998,103.820000,100.075996,6959800
+2015-09-15,103.250000,103.800003,101.830002,103.430000,99.700058,9153500
+2015-09-16,103.320000,104.070000,102.750000,103.959999,100.210953,6509700
+2015-09-17,104.199997,105.949997,103.750000,104.199997,100.442284,7792600
+2015-09-18,103.199997,104.209999,102.389999,102.839996,99.131332,13423100
+2015-09-21,103.680000,103.830002,102.459999,103.410004,99.680786,7152300
+2015-09-22,102.180000,102.750000,101.480003,102.489998,98.793953,8330100
+2015-09-23,102.440002,102.629997,101.379997,101.570000,97.907135,5818000
+2015-09-24,101.000000,101.330002,99.239998,100.620003,96.991386,10706800
+2015-09-25,101.510002,101.800003,99.580002,100.300003,96.682938,7179800
+2015-09-28,99.900002,100.389999,98.300003,98.489998,94.938202,8238300
+2015-09-29,98.510002,100.059998,97.769997,99.419998,95.834663,9466900
+2015-09-30,100.779999,102.430000,100.500000,102.199997,98.514412,9125400
+2015-10-01,102.970001,103.470001,101.080002,102.669998,98.967468,7503700
+2015-10-02,101.209999,103.010002,99.879997,103.000000,99.285561,9105600
+2015-10-05,103.699997,104.199997,102.610001,103.849998,100.104912,7144600
+2015-10-06,104.190002,104.510002,103.209999,103.769997,100.027794,5660600
+2015-10-07,104.489998,104.589996,102.660004,103.389999,99.661499,6947200
+2015-10-08,103.199997,104.889999,102.699997,104.610001,100.837509,7499100
+2015-10-09,105.089996,106.050003,104.669998,105.559998,101.753242,7341800
+2015-10-12,105.910004,106.699997,105.620003,106.349998,102.514755,5179000
+2015-10-13,105.739998,107.389999,105.309998,106.589996,102.746101,8342700
+2015-10-14,106.500000,106.849998,105.230003,105.730003,101.917122,5563600
+2015-10-15,106.500000,108.000000,106.260002,107.889999,103.999214,6739200
+2015-10-16,108.269997,108.500000,107.459999,108.239998,104.336594,5908900
+2015-10-19,108.250000,109.970001,107.940002,109.470001,105.522240,8001900
+2015-10-20,109.550003,110.529999,109.410004,109.839996,105.878883,9542400
+2015-10-21,110.669998,111.559998,110.010002,110.089996,106.119881,8769800
+2015-10-22,110.669998,113.349998,110.389999,113.250000,109.165932,10028900
+2015-10-23,114.209999,114.209999,111.849998,113.089996,109.011688,7928400
+2015-10-26,113.070000,113.580002,112.120003,113.519997,109.426186,5729600
+2015-10-27,113.290001,114.269997,113.250000,113.769997,109.667168,5857600
+2015-10-28,113.970001,114.459999,112.860001,114.339996,110.216614,6452900
+2015-10-29,114.339996,115.400002,114.199997,115.040001,110.891373,6067100
+2015-10-30,115.000000,115.239998,113.669998,113.739998,109.638252,8501600
+2015-11-02,114.489998,115.309998,114.010002,115.040001,110.891373,6949500
+2015-11-03,114.970001,116.400002,114.540001,115.540001,111.373344,7132100
+2015-11-04,116.639999,116.830002,110.809998,113.250000,109.165932,20690800
+2015-11-05,113.260002,113.930000,111.599998,113.000000,108.924934,14843700
+2015-11-06,114.599998,116.750000,114.570000,115.669998,111.498657,16803400
+2015-11-09,115.900002,116.730003,115.180000,116.419998,112.221611,9317500
+2015-11-10,116.169998,117.510002,115.510002,117.419998,113.185547,8240500
+2015-11-11,117.550003,117.580002,116.430000,116.519997,112.317993,6381700
+2015-11-12,115.599998,116.989998,115.000000,116.209999,112.019173,7223000
+2015-11-13,115.919998,116.419998,114.379997,114.839996,110.698586,8078100
+2015-11-16,113.470001,116.080002,113.339996,115.919998,111.739639,5770200
+2015-11-17,116.110001,117.550003,115.510002,116.129997,111.942055,6434400
+2015-11-18,116.220001,118.279999,116.050003,118.139999,113.879585,6744600
+2015-11-19,118.139999,119.160004,117.639999,118.709999,114.429016,6101000
+2015-11-20,119.110001,120.250000,118.900002,120.070000,115.739975,10821700
+2015-11-23,120.300003,120.650002,119.000000,119.419998,115.113419,6986400
+2015-11-24,117.900002,118.570000,117.279999,117.949997,113.696434,9158200
+2015-11-25,118.290001,119.339996,118.150002,118.669998,114.390465,4751500
+2015-11-27,116.000000,116.500000,113.699997,115.129997,110.978127,14912000
+2015-11-30,115.559998,115.580002,113.309998,113.470001,109.377991,16560300
+2015-12-01,114.150002,115.459999,113.660004,115.389999,111.228752,8896100
+2015-12-02,115.389999,115.470001,113.830002,114.000000,109.888878,7787800
+2015-12-03,114.169998,114.650002,111.440002,111.889999,107.854965,10446600
+2015-12-04,112.739998,114.309998,112.529999,114.239998,110.120224,8771300
+2015-12-07,114.559998,114.559998,112.650002,113.830002,109.725014,6738500
+2015-12-08,113.349998,113.449997,112.400002,112.480003,108.423698,7480600
+2015-12-09,112.389999,113.059998,110.580002,111.470001,107.450111,9050700
+2015-12-10,111.150002,111.580002,110.169998,110.760002,107.450111,6954500
+2015-12-11,110.760002,110.760002,107.620003,108.040001,104.811401,12334900
+2015-12-14,108.680000,109.870003,108.279999,109.349998,106.082253,9596900
+2015-12-15,112.050003,113.349998,111.580002,112.160004,108.808281,13361700
+2015-12-16,114.690002,114.750000,111.800003,113.790001,110.389565,12304500
+2015-12-17,114.129997,114.480003,111.980003,112.010002,108.662758,9375000
+2015-12-18,112.010002,112.440002,107.349998,107.720001,104.500961,28376900
+2015-12-21,108.800003,110.099998,105.330002,106.589996,103.404716,22847200
+2015-12-22,106.989998,107.199997,105.830002,106.739998,103.550240,8514200
+2015-12-23,107.209999,107.239998,104.300003,105.559998,102.405502,12372900
+2015-12-24,105.199997,106.639999,105.059998,105.860001,102.696541,4356100
+2015-12-28,106.500000,108.199997,106.330002,107.250000,104.044998,8787300
+2015-12-29,107.879997,108.040001,106.449997,107.080002,103.880089,8607200
+2015-12-30,106.889999,107.209999,106.250000,106.339996,103.162193,4917000
+2015-12-31,106.139999,106.309998,105.059998,105.080002,101.939850,6670100
+2016-01-04,103.120003,103.430000,101.730003,102.980003,99.902618,12531500
+2016-01-05,102.669998,102.669998,99.889999,100.900002,97.884766,16212900
+2016-01-06,99.379997,101.459999,99.360001,100.360001,97.360901,13934500
+2016-01-07,98.959999,101.349998,98.519997,99.500000,96.526596,14680200
+2016-01-08,100.629997,100.919998,99.000000,99.250000,96.284065,10691600
+2016-01-11,100.209999,100.449997,98.550003,99.919998,96.934044,9558600
+2016-01-12,100.970001,101.849998,100.339996,101.459999,98.428032,8863900
+2016-01-13,101.879997,101.879997,98.120003,98.480003,95.537086,11615400
+2016-01-14,98.650002,99.910004,97.190002,99.110001,96.148254,11147500
+2016-01-15,95.620003,96.879997,93.459999,93.900002,91.093956,22337000
+2016-01-19,95.000000,95.070000,93.029999,93.970001,91.161850,13286800
+2016-01-20,92.180000,93.639999,90.419998,92.540001,89.774597,16918100
+2016-01-21,92.870003,94.860001,92.300003,94.019997,91.210358,12078200
+2016-01-22,95.949997,97.419998,95.550003,96.900002,94.004303,12303800
+2016-01-25,96.419998,96.680000,95.120003,95.290001,92.442413,7812200
+2016-01-26,95.489998,96.430000,95.089996,96.269997,93.393120,6895000
+2016-01-27,96.309998,96.629997,93.870003,94.320000,91.501396,6328900
+2016-01-28,95.199997,95.199997,92.370003,93.529999,90.735001,7312700
+2016-01-29,94.209999,95.820000,93.629997,95.820000,92.956573,8211600
+2016-02-01,95.080002,95.709999,94.699997,95.150002,92.306595,7027200
+2016-02-02,94.000000,94.169998,92.870003,93.120003,90.337265,7244400
+2016-02-03,94.099998,95.410004,92.419998,95.139999,92.296883,10878000
+2016-02-04,94.870003,96.730003,94.599998,95.430000,92.578232,8207900
+2016-02-05,95.320000,95.389999,93.470001,93.900002,91.093956,8898500
+2016-02-08,92.559998,92.959999,89.510002,92.120003,89.367142,13750100
+2016-02-09,90.150002,93.199997,89.040001,92.320000,89.561165,15166900
+2016-02-10,88.000000,90.029999,86.250000,88.849998,86.194855,32649800
+2016-02-11,87.000000,91.059998,86.959999,90.309998,87.611229,17523800
+2016-02-12,91.519997,91.589996,89.610001,91.150002,88.426132,10786100
+2016-02-16,92.470001,93.309998,91.790001,92.910004,90.133530,9050900
+2016-02-17,93.669998,96.440002,93.559998,95.500000,92.646133,12610000
+2016-02-18,96.339996,96.449997,94.449997,95.169998,92.325996,8773900
+2016-02-19,95.230003,95.720001,94.559998,95.010002,92.170769,6838200
+2016-02-22,95.010002,97.000000,95.010002,96.370003,93.490135,6980400
+2016-02-23,96.500000,96.820000,95.150002,95.379997,92.529724,6253800
+2016-02-24,94.650002,95.750000,93.230003,95.430000,92.578232,6945600
+2016-02-25,95.919998,95.949997,94.349998,95.650002,92.791656,4536900
+2016-02-26,95.709999,96.220001,95.279999,95.309998,92.461807,5277000
+2016-02-29,95.309998,96.290001,95.230003,95.519997,92.665535,7794400
+2016-03-01,95.900002,97.660004,95.529999,97.650002,94.731888,7139900
+2016-03-02,97.669998,97.830002,96.410004,97.000000,94.101311,6498600
+2016-03-03,97.790001,98.839996,96.889999,98.820000,95.866920,8536100
+2016-03-04,99.000000,99.209999,97.900002,98.480003,95.537086,6739800
+2016-03-07,98.570000,99.709999,98.480003,99.389999,96.419891,7218700
+2016-03-08,98.930000,98.980003,97.599998,97.820000,94.896805,6229300
+2016-03-09,98.000000,98.239998,96.809998,97.660004,94.741585,7010700
+2016-03-10,97.940002,98.419998,95.870003,97.040001,94.140114,5417500
+2016-03-11,97.570000,98.349998,97.540001,97.940002,95.013222,5605800
+2016-03-14,97.940002,99.220001,97.839996,98.809998,95.857216,6175000
+2016-03-15,98.019997,98.330002,97.389999,98.239998,95.304245,5942400
+2016-03-16,98.160004,99.080002,97.470001,98.440002,95.498283,6079400
+2016-03-17,98.279999,99.910004,98.279999,99.599998,96.623619,6385600
+2016-03-18,99.980003,100.150002,98.959999,99.199997,96.235565,10205100
+2016-03-21,98.919998,99.120003,97.699997,98.459999,95.517677,7070300
+2016-03-22,97.589996,98.320000,97.440002,97.580002,94.663979,6475700
+2016-03-23,97.720001,97.849998,96.750000,96.830002,93.936394,5517700
+2016-03-24,96.550003,97.489998,96.300003,97.220001,94.314735,5577700
+2016-03-28,97.500000,98.459999,97.449997,98.089996,95.158730,5317100
+2016-03-29,98.000000,98.370003,97.360001,98.160004,95.226654,5124300
+2016-03-30,98.900002,99.089996,98.349998,98.910004,95.954239,5677500
+2016-03-31,99.010002,100.370003,99.010002,99.309998,96.342278,7038800
+2016-04-01,98.800003,99.339996,98.580002,99.070000,96.109451,5895500
+2016-04-04,99.300003,99.440002,98.580002,98.680000,95.731102,5334700
+2016-04-05,96.849998,97.610001,96.150002,97.000000,94.101311,10818800
+2016-04-06,96.959999,97.629997,96.830002,97.480003,94.566971,6934400
+2016-04-07,97.110001,97.419998,95.919998,96.160004,93.286415,7525400
+2016-04-08,96.629997,96.839996,95.800003,96.419998,93.538635,6982700
+2016-04-11,96.930000,97.639999,96.199997,96.269997,93.393120,5916600
+2016-04-12,96.279999,97.529999,96.070000,97.349998,94.440849,6213400
+2016-04-13,98.029999,99.809998,97.959999,99.480003,96.507202,7967400
+2016-04-14,99.790001,99.870003,98.260002,98.629997,95.682594,6641900
+2016-04-15,98.500000,98.889999,98.180000,98.589996,95.643791,6146800
+2016-04-18,99.599998,101.949997,99.500000,101.480003,98.447433,10650400
+2016-04-19,102.000000,103.639999,101.930000,102.639999,99.572769,10639800
+2016-04-20,103.019997,103.480003,101.959999,103.269997,100.183929,7712000
+2016-04-21,103.360001,104.120003,102.650002,102.910004,99.834702,6824800
+2016-04-22,103.059998,103.989998,102.930000,103.769997,100.668999,5731300
+2016-04-25,103.500000,104.620003,103.349998,104.570000,101.445091,6117100
+2016-04-26,104.580002,105.570000,104.180000,104.889999,101.755524,6452900
+2016-04-27,105.190002,105.599998,104.760002,105.279999,102.133865,5996100
+2016-04-28,104.540001,105.489998,103.800003,104.029999,100.921227,5658300
+2016-04-29,103.949997,104.209999,102.400002,103.260002,100.174240,7006900
+2016-05-02,103.300003,104.570000,102.800003,104.360001,101.241371,5285700
+2016-05-03,103.879997,103.989998,103.190002,103.779999,100.678703,6243300
+2016-05-04,103.309998,104.480003,103.199997,103.669998,100.571983,6445100
+2016-05-05,103.980003,105.129997,103.449997,104.930000,101.794327,6466000
+2016-05-06,104.510002,105.739998,104.389999,105.540001,102.386108,7407500
+2016-05-09,106.010002,106.680000,104.970001,105.339996,102.192078,8130300
+2016-05-10,105.529999,106.750000,105.449997,106.599998,103.414429,10761800
+2016-05-11,101.459999,102.500000,100.620003,102.290001,99.233238,27068100
+2016-05-12,102.500000,102.849998,101.129997,101.709999,98.670563,11251700
+2016-05-13,101.709999,102.040001,100.360001,100.519997,97.516113,9570200
+2016-05-16,100.410004,100.669998,99.370003,100.360001,97.360901,9113000
+2016-05-17,100.790001,101.290001,99.550003,99.940002,96.953445,8694400
+2016-05-18,99.589996,99.769997,98.360001,99.000000,96.041550,9323100
+2016-05-19,98.580002,98.610001,97.510002,98.410004,95.469170,7063500
+2016-05-20,99.000000,99.930000,98.769997,99.779999,96.798225,7297000
+2016-05-23,99.599998,100.330002,99.120003,99.180000,96.216171,5453400
+2016-05-24,99.449997,100.040001,99.379997,99.510002,96.536301,5250800
+2016-05-25,99.919998,100.800003,99.809998,99.860001,96.875847,5667300
+2016-05-26,99.889999,100.489998,99.570000,99.809998,96.827332,3749700
+2016-05-27,99.809998,100.529999,99.809998,100.290001,97.292992,4589100
+2016-05-31,99.849998,99.889999,98.570000,99.220001,96.254974,10396200
+2016-06-01,98.669998,98.989998,97.730003,98.519997,95.575874,7243900
+2016-06-02,98.070000,98.720001,97.870003,98.720001,95.769905,4822100
+2016-06-03,98.660004,98.919998,97.730003,98.750000,95.799011,6857500
+2016-06-06,98.959999,99.120003,98.470001,98.779999,95.828117,4986600
+2016-06-07,99.000000,99.040001,98.320000,98.349998,95.410965,5608400
+2016-06-08,98.489998,98.820000,97.699997,98.040001,95.110229,5466200
+2016-06-09,97.820000,97.930000,97.099998,97.839996,94.916199,5379400
+2016-06-10,97.150002,97.709999,96.779999,97.339996,94.431152,7204400
+2016-06-13,96.750000,98.980003,96.699997,97.570000,94.654274,8766300
+2016-06-14,97.199997,98.550003,97.180000,98.400002,95.459480,6125700
+2016-06-15,98.190002,99.000000,98.190002,98.269997,95.333359,6663200
+2016-06-16,97.800003,98.629997,96.860001,98.379997,95.440071,7129700
+2016-06-17,98.410004,99.139999,98.269997,99.000000,96.041550,8959400
+2016-06-20,100.080002,100.959999,99.500000,99.570000,96.594505,8268600
+2016-06-21,99.730003,99.790001,98.769997,98.820000,95.866920,5202500
+2016-06-22,99.080002,99.620003,98.699997,98.790001,95.837822,4948900
+2016-06-23,99.440002,99.620003,98.610001,99.019997,96.060944,5826100
+2016-06-24,96.010002,97.279999,95.139999,95.720001,92.859558,15079900
+2016-06-27,94.910004,95.230003,94.000000,94.379997,91.559601,9438400
+2016-06-28,95.459999,96.279999,95.139999,96.050003,93.179695,7143100
+2016-06-29,96.910004,97.290001,96.279999,96.980003,94.081909,6563600
+2016-06-30,97.099998,98.070000,96.879997,97.820000,94.896805,7189500
+2016-07-01,97.750000,98.639999,97.709999,98.029999,95.100533,5681600
+2016-07-05,97.769997,98.000000,97.169998,97.660004,94.741585,5846400
+2016-07-06,97.000000,98.519997,96.760002,98.449997,95.507980,6223300
+2016-07-07,97.629997,98.480003,97.580002,98.410004,96.162682,5104500
+2016-07-08,99.250000,99.849998,98.769997,99.620003,97.345055,6045800
+2016-07-11,99.800003,100.349998,99.400002,99.980003,97.696838,5428000
+2016-07-12,100.180000,100.790001,100.000000,100.199997,97.911797,8508500
+2016-07-13,100.459999,100.800003,99.849998,99.879997,97.599113,5494700
+2016-07-14,100.419998,100.800003,99.680000,99.970001,97.687057,7461500
+2016-07-15,100.419998,100.540001,99.540001,99.800003,97.520943,5764400
+2016-07-18,99.900002,100.709999,99.870003,100.150002,97.862938,5051300
+2016-07-19,99.809998,99.989998,99.360001,99.470001,97.198479,5742700
+2016-07-20,98.650002,98.690002,97.139999,98.220001,95.977020,10664900
+2016-07-21,98.169998,99.129997,97.620003,98.010002,95.771812,6726600
+2016-07-22,98.389999,98.419998,97.470001,97.709999,95.478661,5381700
+2016-07-25,97.900002,98.059998,97.250000,97.389999,95.165970,4825500
+2016-07-26,97.019997,97.160004,96.339996,96.690002,94.481964,6274800
+2016-07-27,96.699997,96.930000,95.860001,96.330002,94.130173,6905900
+2016-07-28,96.400002,96.489998,95.820000,95.910004,93.719772,7713500
+2016-07-29,95.849998,96.379997,95.849998,95.949997,93.758850,8264000
+2016-08-01,96.150002,96.199997,95.080002,95.540001,93.358215,7131600
+2016-08-02,95.349998,95.750000,94.559998,95.010002,92.840324,7644100
+2016-08-03,95.059998,96.430000,95.000000,96.089996,93.895660,9068700
+2016-08-04,95.529999,95.980003,94.709999,95.160004,92.986900,9768100
+2016-08-05,95.400002,96.000000,95.349998,95.830002,93.641602,6528500
+2016-08-08,96.110001,96.290001,95.580002,95.750000,93.563423,7792000
+2016-08-09,95.610001,96.849998,95.300003,96.669998,94.462418,15548700
+2016-08-10,96.699997,99.269997,95.849998,97.860001,95.625244,27181300
+2016-08-11,98.199997,98.389999,97.150002,97.769997,95.537292,9056500
+2016-08-12,97.820000,97.820000,96.660004,96.839996,94.628532,9360300
+2016-08-15,97.349998,97.610001,96.919998,97.099998,94.882599,5299600
+2016-08-16,96.860001,97.250000,96.269997,96.879997,94.667618,5777800
+2016-08-17,96.839996,96.970001,96.519997,96.870003,94.657852,5697300
+2016-08-18,96.870003,97.120003,96.550003,96.650002,94.442879,5250700
+2016-08-19,96.400002,96.790001,96.050003,96.389999,94.188805,6362400
+2016-08-22,96.470001,96.470001,95.650002,95.870003,93.680687,5495300
+2016-08-23,96.129997,96.430000,95.800003,95.970001,93.778397,4996300
+2016-08-24,96.010002,96.220001,95.510002,95.820000,93.631828,5912500
+2016-08-25,95.620003,95.820000,95.410004,95.550003,93.367996,4518700
+2016-08-26,95.660004,96.129997,94.750000,95.209999,93.035751,6019600
+2016-08-29,95.070000,95.320000,94.459999,94.870003,92.703529,7858200
+2016-08-30,94.769997,95.290001,94.769997,94.860001,92.693748,6845000
+2016-08-31,94.690002,94.900002,94.160004,94.459999,92.302879,7375200
+2016-09-01,94.730003,94.870003,93.910004,94.260002,92.107452,6274200
+2016-09-02,94.750000,94.959999,94.129997,94.419998,92.263794,5382000
+2016-09-06,94.529999,94.529999,93.349998,93.800003,91.657959,8463500
+2016-09-07,93.980003,94.029999,93.330002,93.709999,91.570015,6281500
+2016-09-08,93.489998,94.209999,92.930000,94.089996,91.941330,7411900
+2016-09-09,93.510002,93.739998,92.389999,92.419998,90.309471,10248400
+2016-09-12,91.959999,93.860001,91.949997,93.639999,91.501610,9367300
+2016-09-13,93.040001,93.300003,92.449997,92.699997,90.583069,7795200
+2016-09-14,92.589996,92.889999,92.070000,92.260002,90.153122,6386500
+2016-09-15,92.120003,92.750000,91.190002,92.500000,90.387642,7559400
+2016-09-16,92.269997,92.739998,91.800003,92.559998,90.446274,10568600
+2016-09-19,92.800003,93.320000,92.339996,92.629997,90.514664,5395300
+2016-09-20,92.699997,93.300003,92.680000,92.949997,90.827370,6200100
+2016-09-21,93.000000,93.080002,91.699997,92.389999,90.280159,12049500
+2016-09-22,92.730003,93.709999,92.639999,93.410004,91.276863,6470700
+2016-09-23,93.070000,93.519997,93.000000,93.269997,91.140053,4407700
+2016-09-26,92.470001,92.680000,91.400002,91.959999,89.859978,10449300
+2016-09-27,91.930000,92.180000,91.500000,91.720001,89.625458,9167400
+2016-09-28,91.750000,92.300003,91.480003,92.199997,90.094482,8272300
+2016-09-29,92.139999,93.099998,91.529999,91.800003,89.703629,7476600
+2016-09-30,92.279999,93.489998,92.209999,92.860001,90.739418,8923700
+2016-10-03,92.470001,92.720001,92.050003,92.489998,90.377876,5834700
+2016-10-04,92.449997,92.989998,92.110001,92.589996,90.475586,5903400
+2016-10-05,92.940002,93.279999,92.379997,92.449997,90.338783,5365000
+2016-10-06,92.750000,93.580002,92.389999,92.830002,90.710106,7484800
+2016-10-07,93.059998,93.290001,92.019997,92.489998,90.377876,6144100
+2016-10-10,92.699997,93.209999,92.400002,92.489998,90.377876,4213900
+2016-10-11,92.330002,92.379997,91.570000,91.860001,89.762260,6651100
+2016-10-12,91.879997,92.080002,91.389999,91.410004,89.322533,6836600
+2016-10-13,90.949997,91.360001,90.320000,91.120003,89.039154,5725200
+2016-10-14,91.769997,91.769997,91.290001,91.300003,89.215057,4345700
+2016-10-17,91.349998,91.470001,90.559998,90.830002,88.755783,5134600
+2016-10-18,91.400002,91.739998,90.980003,91.169998,89.088013,4865600
+2016-10-19,91.269997,92.139999,91.000000,91.930000,89.830658,6140500
+2016-10-20,91.660004,92.320000,91.440002,92.029999,89.928375,6153200
+2016-10-21,91.330002,94.190002,90.599998,93.029999,90.905533,13814500
+2016-10-24,93.489998,94.150002,92.800003,93.370003,91.237778,7281000
+2016-10-25,93.139999,93.360001,92.639999,92.750000,90.631935,6380700
+2016-10-26,92.389999,93.720001,91.900002,93.489998,91.355026,7152800
+2016-10-27,93.800003,94.339996,93.309998,94.019997,91.872925,6647000
+2016-10-28,93.889999,94.400002,93.529999,93.849998,91.706802,7309100
+2016-10-31,93.720001,93.820000,92.309998,92.690002,90.573303,9957600
+2016-11-01,92.779999,92.989998,91.660004,92.389999,90.280159,6449800
+2016-11-02,91.910004,92.559998,91.690002,91.910004,89.811119,6716200
+2016-11-03,92.260002,94.129997,92.239998,93.370003,91.237778,7751400
+2016-11-04,93.500000,93.790001,92.360001,92.449997,90.338783,7456500
+2016-11-07,93.769997,94.480003,92.949997,94.430000,92.273567,6996100
+2016-11-08,94.529999,94.849998,93.900002,94.379997,92.224701,6111000
+2016-11-09,92.290001,94.919998,92.110001,94.639999,92.478775,8635700
+2016-11-10,94.889999,96.059998,94.230003,94.959999,92.791466,13777900
+2016-11-11,97.180000,98.320000,95.779999,97.680000,95.449348,23008100
+2016-11-14,97.559998,98.309998,97.449997,97.919998,95.683868,13181000
+2016-11-15,97.900002,98.139999,97.419998,97.699997,95.468895,7184600
+2016-11-16,98.330002,99.169998,98.099998,99.120003,96.856476,9690100
+2016-11-17,99.050003,99.500000,98.269997,99.370003,97.100761,8662600
+2016-11-18,99.050003,99.370003,98.139999,98.239998,95.996559,8586100
+2016-11-21,98.110001,98.269997,97.000000,97.629997,95.400490,10771100
+2016-11-22,97.400002,97.930000,97.269997,97.709999,95.478661,5323700
+2016-11-23,97.669998,98.279999,97.589996,98.260002,96.016113,5897400
+2016-11-25,98.709999,99.070000,98.500000,98.820000,96.563316,4396800
+2016-11-28,98.519997,99.199997,98.320000,98.970001,96.709892,7106700
+2016-11-29,99.239998,99.680000,98.970001,99.669998,97.393898,8396800
+2016-11-30,99.400002,100.290001,99.000000,99.120003,96.856476,11369200
+2016-12-01,99.129997,99.750000,98.620003,98.940002,96.680580,9958100
+2016-12-02,98.790001,99.080002,98.250000,98.500000,96.250618,7737200
+2016-12-05,98.980003,100.269997,98.680000,99.959999,97.677284,11331500
+2016-12-06,99.949997,100.720001,99.820000,100.660004,98.361305,8427500
+2016-12-07,100.510002,102.070000,100.440002,101.989998,99.660927,12255700
+2016-12-08,101.470001,103.959999,101.470001,103.379997,101.797714,16524800
+2016-12-09,103.489998,105.519997,103.180000,104.860001,103.255058,12173900
+2016-12-12,104.610001,104.699997,103.730003,104.059998,102.467308,7880200
+2016-12-13,104.199997,104.309998,103.709999,103.849998,102.260521,7393200
+2016-12-14,103.620003,104.410004,103.500000,104.050003,102.457458,7961900
+2016-12-15,103.849998,105.639999,103.820000,104.389999,102.792252,8745400
+2016-12-16,104.709999,104.730003,103.750000,103.910004,102.319603,11399500
+2016-12-19,105.699997,106.260002,105.070000,105.300003,103.688332,11475600
+2016-12-20,105.639999,106.199997,105.059998,105.459999,103.845879,8947300
+2016-12-21,104.610001,105.910004,104.510002,105.559998,103.944344,5202400
+2016-12-22,104.959999,105.489998,104.699997,105.419998,103.806488,5230800
+2016-12-23,105.239998,105.339996,104.750000,105.150002,103.540627,3231000
+2016-12-27,104.849998,105.559998,104.779999,105.169998,103.560310,3639500
+2016-12-28,104.989998,105.320000,104.059998,104.300003,102.703629,5178600
+2016-12-29,104.230003,104.769997,103.870003,104.559998,102.959648,5892000
+2016-12-30,104.570000,105.000000,103.959999,104.220001,102.624855,6983300

+ 5002 - 0
server/LEMON-master/dataset/sinewave.csv

@@ -0,0 +1,5002 @@
+sinewave
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985
+0.873736397
+0.90255357
+0.927808777
+0.949402346
+0.967249058
+0.98127848
+0.991435244
+0.997679266
+0.999985904
+0.998346054
+0.992766189
+0.983268329
+0.969889958
+0.952683874
+0.931717983
+0.907075026
+0.878852258
+0.847161063
+0.812126509
+0.773886863
+0.73259304
+0.688408006
+0.64150614
+0.592072543
+0.540302306
+0.486399742
+0.430577581
+0.373056127
+0.314062391
+0.253829194
+0.192594249
+0.130599223
+0.068088781
+0.005309624
+-0.057490488
+-0.120063711
+-0.182163097
+-0.243543569
+-0.303962886
+-0.3631826
+-0.420968998
+-0.477094024
+-0.531336178
+-0.583481391
+-0.633323869
+-0.680666907
+-0.725323664
+-0.7671179
+-0.805884672
+-0.841470985
+-0.873736397
+-0.90255357
+-0.927808777
+-0.949402346
+-0.967249058
+-0.98127848
+-0.991435244
+-0.997679266
+-0.999985904
+-0.998346054
+-0.992766189
+-0.983268329
+-0.969889958
+-0.952683874
+-0.931717983
+-0.907075026
+-0.878852258
+-0.847161063
+-0.812126509
+-0.773886863
+-0.73259304
+-0.688408006
+-0.64150614
+-0.592072543
+-0.540302306
+-0.486399742
+-0.430577581
+-0.373056127
+-0.314062391
+-0.253829194
+-0.192594249
+-0.130599223
+-0.068088781
+-0.005309624
+0.057490488
+0.120063711
+0.182163097
+0.243543569
+0.303962886
+0.3631826
+0.420968998
+0.477094024
+0.531336178
+0.583481391
+0.633323869
+0.680666907
+0.725323664
+0.7671179
+0.805884672
+0.841470985

+ 7 - 0
server/LEMON-master/lemon_requirements.txt

@@ -0,0 +1,7 @@
+pandas==0.23.0
+Pillow==5.1.0
+redis==3.3.2
+scikit-image==0.13.1
+scikit-learn==0.19.1
+scipy==1.1.0
+h5py==2.10

+ 0 - 0
server/LEMON-master/local_history.patch


BIN
server/LEMON-master/res.npy


+ 172 - 0
server/LEMON-master/run/SVNH_DatasetUtil.py

@@ -0,0 +1,172 @@
+import numpy as np
+import scipy.io as sio
+import os.path
+
+train_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "SVHN_train_32x32.mat")
+test_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "SVHN_test_32x32.mat")
+extra_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "SVHN_extra_32x32.mat")
+
+#default parameters for argparse
+# default_params = {
+#     "learning_rate": 0.001,
+#     "num_epochs": 25,
+#     "batch_size": 128,
+#     "train_data_file": "./assignment_httm_data/SVHN_train_32x32.mat",
+#     "test_data_file": "./assignment_httm_data/SVHN_test_32x32.mat",
+#     "extra_data_file": "./assignment_httm_data/SVHN_extra_32x32.mat",
+#     "load_extra": False,
+#     "model": "CNN1",
+#     "validation_percentage": 0.1,
+#     "data_shuffle": True,
+#     "preprocess": False,
+#     "mode": 'train',
+#     "runs_name": None,
+#     "tensorboard_dir": '~/tensorboard_runs'
+# }
+
+def load_raw_data(train_data_file, test_data_file, load_extra_data, extra_data_file):
+    """
+    Load RAW Google SVHN Digit Localization from .mat files
+    """
+    loading_information = "with Extra" if load_extra_data else "without Extra"
+    print("Loading SVHN dataset {}...".format(loading_information))
+    raw_train_data = sio.loadmat(train_data_file)
+    raw_test_data = sio.loadmat(test_data_file)
+    if load_extra_data:
+        raw_extra_data = sio.loadmat(extra_data_file)
+        print("Train size: {}, Test size: {}, Extra size: {}".format(raw_train_data['X'].shape[3],
+                                                                     raw_test_data['X'].shape[3],
+                                                                     raw_extra_data['X'].shape[3]))
+        return [raw_train_data, raw_test_data, raw_extra_data]
+    else:
+        print("Train size: {}, Test size: {}".format(raw_train_data['X'].shape[3],
+                                                     raw_test_data['X'].shape[3]))
+        return [raw_train_data, raw_test_data]
+
+
+def format_data(raw_data, number_of_examples):
+    """
+    Reshape RAW data to regular shape
+    """
+    old_shape = raw_data.shape
+    new_data = []
+    for i in range(number_of_examples):
+        new_data.append(raw_data[:, :, :, i])
+    new_data = np.asarray(new_data)
+    print("Data has been reshaped from {} to {}".format(raw_data.shape, new_data.shape))
+    return new_data / 255.
+
+
+def one_hot_encoder(data, number_of_labels):
+    """
+    One-hot encoder for labels
+    """
+    data_size = len(data)
+    one_hot_matrix = np.zeros(shape=(data_size, number_of_labels))
+    for i in range(data_size):
+        current_row = np.zeros(shape=(number_of_labels))
+        current_number = data[i][0]
+        if current_number == 10:
+            current_row[0] = 1
+        else:
+            current_row[current_number] = 1
+        one_hot_matrix[i] = current_row
+    return one_hot_matrix
+
+
+def load_svhn_data(train_path, test_path, extra_path, load_extra, eval_percentage):
+    """
+    Load SVHN Dataset
+    """
+    print("Loading SVHN dataset for classification...")
+    # Load raw dataset
+    if load_extra:
+
+        print("Found extra dataset, loading it...")
+        train, test, extra = load_raw_data(train_path, test_path, load_extra, extra_path)
+        train['X'] = np.concatenate((train['X'], extra['X']), axis=3)
+        train['y'] = np.concatenate((train['y'], extra['y']), axis=0)
+    else:
+        train, test = load_raw_data(train_path, test_path, load_extra, extra_path)
+
+    # get values and labels
+    train_all_values = format_data(train['X'], train['X'].shape[3])
+    train_all_labels = one_hot_encoder(train['y'], 10)
+    test_values = format_data(test['X'], test['X'].shape[3])
+    test_labels = one_hot_encoder(test['y'], 10)
+
+    np.random.seed(41)
+    shuffle_indices = np.random.permutation(np.arange(len(train_all_values)))
+    train_values_shuffled = train_all_values[shuffle_indices]
+    train_labels_shuffled = train_all_labels[shuffle_indices]
+
+    # Seperate into training and eval set
+    # Original setting split the data into training and validation samples
+    train_index = -1 * int(eval_percentage * float(len(train_values_shuffled)))
+    train_values, eval_values = train_values_shuffled[:train_index], train_values_shuffled[train_index:]
+    train_labels, eval_labels = train_labels_shuffled[:train_index], train_labels_shuffled[train_index:]
+    print("Train/Eval split: {:d}/{:d}".format(len(train_labels), len(eval_labels)))
+    print("Loading data completed")
+    return [train_values, train_labels, eval_values, eval_labels, test_values, test_labels]
+
+def my_load_svhn_data(train_path, test_path, extra_path, load_extra):
+    """
+    Load SVHN Dataset
+    """
+    print("Loading SVHN dataset for classification...")
+    # Load raw dataset
+    if load_extra:
+
+        print("Found extra dataset, loading it...")
+        train, test, extra = load_raw_data(train_path, test_path, load_extra, extra_path)
+        train['X'] = np.concatenate((train['X'], extra['X']), axis=3)
+        train['y'] = np.concatenate((train['y'], extra['y']), axis=0)
+    else:
+        train, test = load_raw_data(train_path, test_path, load_extra, extra_path)
+
+    # get values and labels
+    train_all_values = format_data(train['X'], train['X'].shape[3])
+    train_all_labels = one_hot_encoder(train['y'], 10)
+    test_values = format_data(test['X'], test['X'].shape[3])
+    test_labels = one_hot_encoder(test['y'], 10)
+
+    np.random.seed(41)
+    shuffle_indices = np.random.permutation(np.arange(len(train_all_values)))
+    train_values_shuffled = train_all_values[shuffle_indices]
+    train_labels_shuffled = train_all_labels[shuffle_indices]
+    print("Loading data completed")
+
+    return train_values_shuffled, train_labels_shuffled, test_values, test_labels
+
+    # Seperate into training and eval set
+    # # Original setting split the data into training and validation samples
+    # train_index = -1 * int(eval_percentage * float(len(train_values_shuffled)))
+    # train_values, eval_values = train_values_shuffled[:train_index], train_values_shuffled[train_index:]
+    # train_labels, eval_labels = train_labels_shuffled[:train_index], train_labels_shuffled[train_index:]
+    # print("Train/Eval split: {:d}/{:d}".format(len(train_labels), len(eval_labels)))
+    # print("Loading data completed")
+    # return [train_values, train_labels, eval_values, eval_labels, test_values, test_labels]
+
+def load_data():
+    train_X, train_Y, test_X, test_Y = my_load_svhn_data(train_path = train_path,
+                                                                      test_path = test_path,
+                                                                      extra_path = extra_path,
+                                                                      load_extra = False)
+
+    return (train_X, train_Y), (test_X, test_Y)
+
+
+if __name__ == "__main__":
+
+
+    # train_X, train_Y, eval_X, eval_Y, test_X, test_Y = load_svhn_data(train_path = train_path,
+    #                                                                   test_path = test_path,
+    #                                                                   extra_path = extra_path,
+    #                                                                   load_extra = True,
+    #                                                                   eval_percentage = 0.1
+    #                                                                  )
+    (train_X, train_Y), (test_X, test_Y) = load_data()
+    print(np.shape(train_X))
+    print(np.shape(train_Y))
+    print(np.shape(test_X))
+    print(np.shape(test_Y))

+ 0 - 0
server/LEMON-master/run/__init__.py


+ 550 - 0
server/LEMON-master/run/api_config_pool.json

@@ -0,0 +1,550 @@
+{
+  "Conv1D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same", "causal"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "Conv2D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "Conv3D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "SeparableConv1D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same", "causal"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "depth_multiplier": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "depthwise_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "pointwise_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "depthwise_regularizer": ["l1", "l2"],
+    "pointwise_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "depthwise_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "pointwise_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "trainable": [true, false]
+  },
+  "SeparableConv2D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "depth_multiplier": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "depthwise_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "pointwise_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "depthwise_regularizer": ["l1", "l2"],
+    "pointwise_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "depthwise_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "pointwise_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "DepthwiseConv1D": {
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "depth_multiplier": [0],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "depthwise_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "depthwise_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "depthwise_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "DepthwiseConv2D": {
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "depth_multiplier": [0],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "depthwise_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "depthwise_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "depthwise_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "Conv2DTranspose": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "output_padding": [0, null],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "Conv3DTranspose": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "output_padding": [0, null],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "MaxPooling1D": {
+    "pool_size": [0],
+    "strides": [0, null],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "MaxPooling2D": {
+    "pool_size": [0],
+    "strides": [0, null],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "MaxPooling3D": {
+    "pool_size": [0],
+    "strides": [0, null],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "AveragePooling1D": {
+    "pool_size": [0],
+    "strides": [0, null],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "AveragePooling2D": {
+    "pool_size": [0],
+    "strides": [0, null],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "AveragePooling3D": {
+    "pool_size": [0],
+    "strides": [0, null],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "GlobalMaxPooling1D": {
+    "data_format": ["channels_first", "channels_last"],
+    "keepdims": [true, false]
+  },
+  "GlobalMaxPooling2D": {
+    "data_format": ["channels_first", "channels_last"],
+    "keepdims": [true, false]
+  },
+  "GlobalMaxPooling3D": {
+    "data_format": ["channels_first", "channels_last"],
+    "keepdims": [true, false]
+  },
+  "GlobalAveragePooling1D": {
+    "data_format": ["channels_first", "channels_last"],
+    "keepdims": [true, false]
+  },
+  "GlobalAveragePooling2D": {
+    "data_format": ["channels_first", "channels_last"],
+    "keepdims": [true, false]
+  },
+  "GlobalAveragePooling3D": {
+    "data_format": ["channels_first", "channels_last"],
+    "keepdims": [true, false]
+  },
+  "LSTM": {
+    "units": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear", null],
+    "recurrent_activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear", null],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "recurrent_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "unit_forget_bias": [true, false],
+    "kernel_regularizer": ["l1", "l2"],
+    "recurrent_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "recurrent_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "dropout": [0],
+    "recurrent_dropout": [0],
+    "return_sequences": [true, false],
+    "return_state": [true, false],
+    "go_backwards": [true, false],
+    "stateful": [true, false],
+    "unroll": [true, false]
+  },
+  "GRU": {
+    "units": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "recurrent_activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "recurrent_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "recurrent_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "recurrent_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "dropout": [0],
+    "recurrent_dropout": [0],
+    "return_sequences": [true, false],
+    "return_state": [true, false],
+    "go_backwards": [true, false],
+    "stateful": [true, false],
+    "unroll": [true, false],
+    "reset_after": [true, false]
+  },
+  "SimpleRNN": {
+    "units": [0],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "recurrent_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "recurrent_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "recurrent_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "dropout": [0],
+    "recurrent_dropout": [0],
+    "return_sequences": [true, false],
+    "return_state": [true, false],
+    "go_backwards": [true, false],
+    "stateful": [true, false],
+    "unroll": [true, false]
+  },
+  "TimeDistributed": {
+  },
+  "Bidirectional": {
+    "merge_mode": ["sum", "mul", "concat", "ave", null]
+  },
+  "ConvLSTM1D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "recurrent_activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "recurrent_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "unit_forget_bias": [true, false],
+    "kernel_regularizer": ["l1", "l2"],
+    "recurrent_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "recurrent_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "return_sequences": [true, false],
+    "return_state": [true, false],
+    "go_backwards": [true, false],
+    "stateful": [true, false],
+    "dropout": [0],
+    "recurrent_dropout": [0]
+  },
+  "ConvLSTM2D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "recurrent_activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "recurrent_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "unit_forget_bias": [true, false],
+    "kernel_regularizer": ["l1", "l2"],
+    "recurrent_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "recurrent_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "return_sequences": [true, false],
+    "return_state": [true, false],
+    "go_backwards": [true, false],
+    "stateful": [true, false],
+    "dropout": [0],
+    "recurrent_dropout": [0]
+  },
+  "ConvLSTM3D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "dilation_rate": [0],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "recurrent_activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "recurrent_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "unit_forget_bias": [true, false],
+    "kernel_regularizer": ["l1", "l2"],
+    "recurrent_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "recurrent_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "return_sequences": [true, false],
+    "return_state": [true, false],
+    "go_backwards": [true, false],
+    "stateful": [true, false],
+    "dropout": [0],
+    "recurrent_dropout": [0]
+  },
+  "BatchNormalization": {
+    "axis": [-1, -2],
+    "momentum": [0],
+    "epsilon": [0],
+    "center": [true, false],
+    "scale": [true, false],
+    "beta_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "gamma_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "moving_mean_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "moving_variance_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "beta_regularizer": ["l1", "l2"],
+    "gamma_regularizer": ["l1", "l2"],
+    "beta_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "gamma_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "LayerNormalization": {
+    "axis": [-1, -2],
+    "epsilon": [0],
+    "center": [true, false],
+    "scale": [true, false],
+    "beta_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "gamma_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "beta_regularizer": ["l1", "l2"],
+    "gamma_regularizer": ["l1", "l2"],
+    "beta_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "gamma_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "Dropout": {
+  },
+  "SpatialDropout1D": {},
+  "SpatialDropout2D": {
+  },
+  "SpatialDropout3D": {
+  },
+  "GaussianDropout": {},
+  "GaussianNoise": {
+    "stddev": [0]
+  },
+  "ActivityRegularization": {
+    "l1": [0],
+    "l2": [0]
+  },
+  "AlphaDropout": {},
+  "Flatten": {
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "RepeatVector": {
+    "n": [0]
+  },
+  "Permute": {},
+  "Cropping1D": {
+    "cropping": [0]
+  },
+  "Cropping2D": {
+    "cropping": [0]
+  },
+  "Cropping3D": {
+    "cropping": [0]
+  },
+  "UpSampling1D": {
+    "size": [2, 3]
+  },
+  "UpSampling2D": {
+    "data_format": ["channels_first", "channels_last"],
+    "interpolation": ["nearest", "bilinear"]
+  },
+  "UpSampling3D": {
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "ZeroPadding1D": {
+    "padding": [0]
+  },
+  "ZeroPadding2D": {
+    "padding": [0],
+    "data_format": ["channels_first", "channels_last"]
+  },
+  "ZeroPadding3D": {
+    "padding": [0]
+  },
+  "Concatenate": {
+    "axis": [-1, -2]
+  },
+  "Average": {},
+  "Maximum": {},
+  "Minimum": {},
+  "Add": {},
+  "Subtract": {},
+  "Multiply": {},
+  "Dot": {
+    "normalize": [true, false]
+  },
+  "ReLU": {
+    "max_value": [0],
+    "negative_slope": [0],
+    "threshold": [0]
+  },
+  "Softmax": {
+    "axis": [-1, -2]
+  },
+  "LeakyReLU": {
+    "alpha": [0]
+  },
+  "PReLU": {
+    "shared_axes": [-1, -2],
+    "alpha_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "alpha_regularizer": ["l1", "l2"],
+    "alpha_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "ELU": {
+    "alpha": [0]
+  },
+  "ThresholdedReLU": {
+    "theta": [0]
+  },
+  "LocallyConnected1D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid"],
+    "data_format": ["channels_first", "channels_last"],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  },
+  "LocallyConnected2D": {
+    "filters": [0],
+    "kernel_size": [0],
+    "strides": [0],
+    "padding": ["valid", "same"],
+    "data_format": ["channels_first", "channels_last"],
+    "activation": ["softmax", "elu", "selu", "softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "exponential", "linear"],
+    "use_bias": [true, false],
+    "kernel_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling",
+      "Orthogonal", "lecun_uniform", "glorot_normal",  "glorot_uniform", "he_normal", "lecun_normal", "Identity"],
+    "bias_initializer": ["Zeros", "Ones", "Constant", "RandomNormal", "RandomUniform", "TruncatedNormal", "VarianceScaling", "Orthogonal", "glorot_normal",  "glorot_uniform", "he_normal", "he_uniform", "lecun_normal", "lecun_uniform", "Identity"],
+    "kernel_regularizer": ["l1", "l2"],
+    "bias_regularizer": ["l1", "l2"],
+    "activity_regularizer": ["l1", "l2"],
+    "kernel_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"],
+    "bias_constraint": ["Constraint", "MaxNorm", "MinMaxNorm", "NonNeg", "UnitNorm"]
+  }
+}

BIN
server/LEMON-master/run/data/SVHN_test_32x32.mat


BIN
server/LEMON-master/run/data/SVHN_train_32x32.mat


+ 0 - 0
server/LEMON-master/run/data/a.txt


BIN
server/LEMON-master/run/data/adv_image/bim_mnist_image.npy


BIN
server/LEMON-master/run/data/adv_image/bim_mnist_label.npy


BIN
server/LEMON-master/run/data/combined_data/fashion_combined_10000_image.npy


BIN
server/LEMON-master/run/data/combined_data/fashion_combined_10000_label.npy


BIN
server/LEMON-master/run/data/t10k-images-idx3-ubyte.gz


BIN
server/LEMON-master/run/data/t10k-labels-idx1-ubyte.gz


+ 34 - 0
server/LEMON-master/run/localization_executor.py

@@ -0,0 +1,34 @@
+"""
+# Part  of localization phase
+"""
+import os
+import sys
+import configparser
+from datetime import datetime
+
+config_name = sys.argv[1]
+
+lemon_cfg = configparser.ConfigParser()
+lemon_cfg.read(f"./config/{config_name}")
+parameters = lemon_cfg['parameters']
+python_prefix = parameters['python_prefix'].rstrip("/")
+
+# 1. get unique inconsistency
+start_time = datetime.now()
+print("Localization Starts!")
+print("\n\nPhase1: Get Unique Inconsistency")
+get_unique_inconsistency = f"{python_prefix}/lemon/python -u -m scripts.localization.get_unique_inconsistency {config_name}"
+os.system(get_unique_inconsistency)
+
+# 2. localization
+print("\n\nPhase2: Localize")
+localize = f"{python_prefix}/lemon/python -u -m run.localize_lemon {config_name}"
+os.system(localize)
+
+# 3. get suspected bugs
+print("\n\nPhase3: Suspected bugs analysis")
+get_suspected_bugs = f"{python_prefix}/lemon/python -u -m scripts.localization.suspected_bugs_detector {config_name}"
+os.system(get_suspected_bugs)
+
+print("Localization finishes!")
+print(f"Localization time cost: {datetime.now() - start_time}")

+ 202 - 0
server/LEMON-master/run/localize_lemon.py

@@ -0,0 +1,202 @@
+# -*-coding:UTF-8-*-
+"""
+# Part  of localization phase
+"""
+import argparse
+import sys
+import os
+import pickle
+import configparser
+from scripts.tools.utils import ModelUtils
+import keras
+from keras.engine.input_layer import InputLayer
+import warnings
+import datetime
+from scripts.logger.lemon_logger import Logger
+import shutil
+from itertools import combinations
+import keras.backend as K
+warnings.filterwarnings("ignore")
+
+
def is_lstm_not_exists(exp_id, output_id):
    """Return True when this LSTM experiment has no results in the given
    output container (these LSTM models cannot run on old mxnet builds)."""
    lstm_exps = ('lstm0-sinewave', 'lstm2-price')
    affected_outputs = ('experiment4', 'experiment5')
    return exp_id in lstm_exps and output_id in affected_outputs
+
+
def get_HH_mm_ss(td):
    """Split a datetime.timedelta into (hours, minutes, seconds),
    folding whole days into the hour count."""
    total_hours = td.days * 24 + td.seconds // 3600
    remainder = td.seconds % 3600
    return total_hours, remainder // 60, remainder % 60
+
+
def generate_report(localize_res, savepath):
    """Dump per-layer localization results to a CSV-style report.

    localize_res maps an identifier (e.g. "model_bk1_bk2_input_3_localize")
    to a list of rows [layer_name, delta, Rl, previous_layers]; a header row
    is written before each identifier's rows (same on-disk output as before).
    """
    with open(savepath, "w+") as fw:
        for localize_header, rows in localize_res.items():
            # BUG FIX: the original called .format(localize_header) on a
            # header string with no placeholder -- a silent no-op; the plain
            # header is what actually ends up on disk.
            fw.write("current_layer, delta,Rl,previous_layer\n")
            for layer_res in rows:
                fw.write("{},{},{},{}\n".format(layer_res[0], layer_res[1], layer_res[2], layer_res[3]))
+
+
+def localize(mut_model_dir,select_idntfr, exp_name,localize_tmp_dir,report_dir,backends):
+    """
+    Localize suspicious layers for one inconsistent (mutant model, input) pair.
+
+    select_idntfr looks like "lenet5-mnist_origin0_input17": the trailing
+    "inputNN" part is the data index, the first two "_"-separated parts form
+    the mutant model identifier. Hidden outputs are extracted per backend in
+    separate subprocesses, then per-layer deviations are written as one CSV
+    report per backend pair under report_dir.
+
+    NOTE(review): relies on module globals set in __main__ (python_prefix,
+    output_dir, config_name, mylogger, failed_list).
+    """
+    # get layer_output for all models coming from specific exp on all backends
+    identifier_split = select_idntfr.split("_")
+    data_index = int(identifier_split[-1][5:])
+    model_idntfr = "{}_{}".format(identifier_split[0], identifier_split[1])
+    # svhn / fashion2 mutants are stored as .hdf5, everything else as .h5.
+    if 'svhn' in model_idntfr or 'fashion2' in model_idntfr:
+        model_path = "{}/{}.hdf5".format(mut_model_dir, model_idntfr)
+    else:
+        model_path = "{}/{}.h5".format(mut_model_dir, model_idntfr)
+    #
+    # # check if indntfr hasn't been localized
+    # for bk1, bk2 in combinations(backends, 2):
+    #     report_path = os.path.join(report_dir, "{}_{}_{}_input{}.csv".format(model_idntfr, bk1, bk2, data_index))
+    #     # not exists; continue fo localize
+    #     if not os.path.exists(report_path):
+    #         break
+    # # all file exist; return
+    # else:
+    #     mylogger.logger.info(f"{select_idntfr} has been localized")
+    #     return
+
+    # Each backend runs in its own interpreter/environment to isolate frameworks.
+    for bk in backends:
+        # NOTE(review): "\" path separators are Windows-only -- confirm target OS.
+        python_bin = f"{python_prefix}\{bk}\python"
+        return_stats = os.system(
+            f"{python_bin} -u -m run.patch_hidden_output_extractor --backend {bk} --output_dir {output_dir} --exp {exp_name}"
+            f" --model_idntfr {model_idntfr} --data_index {data_index} --config_name {config_name}")
+        # assert return_stats==0,"Getting hidden output failed!"
+        if return_stats != 0:
+            # Extraction failed on this backend: record the pair and give up on it.
+            mylogger.logger.info("Getting hidden output failed!")
+            failed_list.append(select_idntfr)
+            return
+    mylogger.logger.info("Getting localization for {}".format(select_idntfr))
+    model = keras.models.load_model(model_path, custom_objects=ModelUtils.custom_objects())
+    # Compare hidden outputs for every backend pair; one CSV report each.
+    for bk1, bk2 in combinations(backends, 2):
+        local_res = {}
+        local_res = get_outputs_divation_onbackends(model=model, backends=[bk1, bk2],
+                                                    model_idntfr=model_idntfr, local_res=local_res,
+                                                    data_index=data_index, localize_tmp_dir=localize_tmp_dir)
+        mylogger.logger.info("Generating localization report for {} on {}-{}!".format(model_idntfr,bk1,bk2))
+        report_path = os.path.join(report_dir, "{}_{}_{}_input{}.csv".format(model_idntfr,bk1,bk2, data_index))
+        generate_report(local_res, report_path)
+    # Release the model and the backend session to keep memory bounded.
+    del model
+    K.clear_session()
+
+
+def get_outputs_divation_onbackends(model,backends,model_idntfr,local_res,data_index,localize_tmp_dir):
+    """Compute per-layer output deviations between two backends.
+
+    Loads the pickled hidden outputs previously dumped into localize_tmp_dir
+    (files named "<model_idntfr>_<backend>_<data_index>") for both backends,
+    delegates the per-layer comparison to ModelUtils.layers_divation, and
+    stores rows [layer_name, delta, Rl, input_layer_names] into local_res
+    under the key "<model>_<bk1>_<bk2>_input_<idx>_localize".
+    Returns the (mutated) local_res dict.
+    """
+    backend1 = backends[0]
+    backend2 = backends[1]
+    with open(os.path.join(localize_tmp_dir, "{}_{}_{}".format(model_idntfr, backend1,data_index)), "rb") as fr:
+        model_layers_outputs_1 = pickle.load(fr)
+    with open(os.path.join(localize_tmp_dir, "{}_{}_{}".format(model_idntfr, backend2,data_index)), "rb") as fr:
+        model_layers_outputs_2 = pickle.load(fr)
+    divations = ModelUtils.layers_divation(model, model_layers_outputs_1, model_layers_outputs_2)
+    compare_res = []
+
+    for i, layer in enumerate(model.layers):
+        # Input layers perform no computation, so they carry no deviation row.
+        if isinstance(layer, InputLayer):
+            continue
+        delta, divation, inputlayers = divations[i]
+        layer_compare_res = [layer.name, delta[0], divation[0],",".join(inputlayers)]  # batch accepted default
+        compare_res.append(layer_compare_res)
+    identifier = "{}_{}_{}_input_{}".format(model_idntfr,backend1,backend2,data_index)
+    idntfr_localize = "{}_localize".format(identifier)
+    local_res[idntfr_localize] = compare_res
+    return local_res
+
+
+if __name__ == "__main__":
+
+    starttime = datetime.datetime.now()
+
+    # get id of experiments
+    config_name = sys.argv[1]
+    lemon_cfg = configparser.ConfigParser()
+    lemon_cfg.read(f"./config/{config_name}")
+    parameters = lemon_cfg['parameters']
+
+    output_dir = parameters['output_dir']
+    output_dir = output_dir[:-1] if output_dir.endswith("/") else output_dir
+    current_container = os.path.split(output_dir)[-1]
+    python_prefix = parameters['python_prefix'].rstrip("/")
+
+    """Initialization"""
+    mylogger = Logger()
+    backend_choices = [1,2,3]
+    exps = parameters['exps'].lstrip().rstrip().split(" ")
+    exps.sort(key=lambda x: x)
+    all_model_inputs = {e:set() for e in exps}
+    items_lists = list()
+    for backend_choice in backend_choices:
+        if backend_choice == 1:
+            pre_backends = ['tensorflow', 'theano', 'cntk']
+        elif backend_choice == 2:
+            pre_backends = ['tensorflow', 'theano', 'mxnet']
+        else:
+            pre_backends = ['tensorflow', 'cntk', 'mxnet']
+        backends_str = "-".join(pre_backends)
+        backend_pairs = [f"{pair[0]}_{pair[1]}" for pair in combinations(pre_backends, 2)]
+
+        with open(os.path.join(output_dir, f"localize_model_inputs-{backends_str}.pkl"), "rb") as fr:
+            localize_model_inputs = pickle.load(fr)
+            for exp_id,model_set in localize_model_inputs.items():
+                if exp_id in exps:
+                    for mi in model_set:
+                        all_model_inputs[exp_id].add(mi)
+
+    for exp,mi_set in all_model_inputs.items():
+        print(exp,len(mi_set))
+    failed_list = []
+    """Print result of inconsistency distribution"""
+    for exp_idntfr,model_inputs_set in all_model_inputs.items():
+        if len(model_inputs_set) > 0:
+            if exp_idntfr == 'inception.v3-imagenet' or exp_idntfr == 'densenet121-imagenet' or is_lstm_not_exists(exp_idntfr,current_container):
+                # inception and densenet can't run on mxnet.
+                # lstm can't run on mxnet before mxnet version 1.3.x
+                backends = ['tensorflow', 'theano', 'cntk']
+            else:
+                backends = ['tensorflow', 'theano', 'cntk','mxnet']
+            print("Localize for {} : {} left.".format(exp_idntfr,len(model_inputs_set)))
+            mut_dir = os.path.join(output_dir,exp_idntfr, "mut_model")
+            localization_dir = os.path.join(output_dir,exp_idntfr, "localization_result")
+            localize_output_dir = os.path.join(output_dir,exp_idntfr, "localize_tmp")
+
+            """make dir for hidden_output and localization dir """
+            if not os.path.exists(localize_output_dir):
+                os.makedirs(localize_output_dir)
+            if not os.path.exists(localization_dir):
+                os.makedirs(localization_dir)
+
+            """Localization"""
+            for idx,select_identifier in enumerate(model_inputs_set):
+                print("{} of {} {}".format(idx,len(model_inputs_set),select_identifier))
+                localize(mut_model_dir=mut_dir,select_idntfr=select_identifier,exp_name=exp_idntfr,
+                         localize_tmp_dir=localize_output_dir,report_dir=localization_dir
+                         ,backends=backends)
+
+            shutil.rmtree(localize_output_dir)
+
+    with open(os.path.join(output_dir, f"failed_idntfrs.txt"), "w") as fw:
+        if len(failed_list) > 0:
+            mylogger.logger.warning(f"{len(failed_list)} idntfrs fail to localize")
+            lists = [f"{line} \n" for line in failed_list]
+            fw.writelines(lists)
+        else:
+            mylogger.logger.info("all idntfrs localize successfully")
+
+    endtime = datetime.datetime.now()
+    time_delta = endtime - starttime
+    h,m,s = get_HH_mm_ss(time_delta)
+    mylogger.logger.info("Localization precess is done: Time used: {} hour,{} min,{} sec".format(h,m,s))
+
+
+
+
+

+ 438 - 0
server/LEMON-master/run/model2json.py

@@ -0,0 +1,438 @@
+import keras
+import json
+import os
+from itertools import product
+import sys
+sys.path.append("../")
+# from scripts.coverage import custom_objects
+import tensorflow as tf
+import psutil
+import scripts.tools.utils as utils_tools
+# folder_path = 'model_json'
+# api_config_pool_path = 'api_config_pool.json'
+PARAMETER_SPACE = 5
+import configparser
+
def custom_objects():
    """Custom-object mapping required to deserialize mutated LEMON models."""
    def no_activation(x):
        # Identity: mutation operators may replace a layer's activation.
        return x

    def leakyrelu(x):
        # LeakyReLU with alpha=0.01, registered under its saved name.
        import keras.backend as K
        return K.relu(x, alpha=0.01)

    return {'no_activation': no_activation, 'leakyrelu': leakyrelu}
+
+
+def extract_edges(model):
+    """Collect the unique directed (source_class, target_class) pairs of
+    directly connected layers in `model`; Input layers are skipped as edge
+    sources. Returns a list of 2-tuples of layer class names."""
+    layer_list = model.layers
+
+    existing_edges = []
+    for layer in layer_list:
+        start_layer_class = layer.__class__.__name__
+        if "Input" in start_layer_class:
+            continue
+        # NOTE(review): `_outbound_nodes` is a private Keras attribute whose
+        # shape varies across Keras versions -- confirm the pinned version.
+        for node in layer._outbound_nodes:
+            end_layer_class = node.outbound_layer.__class__.__name__
+            edge = (start_layer_class, end_layer_class)  # edge should be direct
+            if edge not in existing_edges:
+                existing_edges.append(edge)
+    return existing_edges
+
+
+def extract_nodes(model):
+    """
+    existing_nodes: {"layer_name1": [layer_config1, layer_config2], "layer_name2": [], ...}
+    Collects, per layer class, every distinct get_config() dict seen in `model`.
+    """
+    layer_list = model.layers
+    existing_nodes = {}
+
+    for layer in layer_list:
+        layer_config = layer.get_config()
+        # Drop identifying / size-specific fields so that configs of
+        # same-class layers compare equal regardless of name or width.
+        layer_config.pop("name")
+        if "filters" in layer_config: layer_config.pop("filters")
+        if "units" in layer_config: layer_config.pop("units")
+        layer_class = layer.__class__.__name__
+        if 'Input' in layer_class:
+            continue
+        if layer_class not in existing_nodes:
+            existing_nodes[layer_class] = []
+        if layer_config not in existing_nodes[layer_class]:
+            existing_nodes[layer_class].append(layer_config)
+    return existing_nodes
+
+
+def extract_inputs(model):
+    """
+    existing_inputs: {"layer_class": {"input_dims": [], "dtype": [], "shape": []}}
+    layer_dims: {"layer_class": {"input_dims": [], "output_dims": []}}
+    Records, per layer class, the distinct input ranks/dtypes/shapes seen in
+    `model`, plus the input/output ranks later used for edge compatibility.
+    """
+    # if model.__class__.__name__ == 'Sequential':
+    #     layer_list = model.layers
+    # else:
+    #     layer_list = model.layers[1:]  # ignore the first input layer
+    layer_list = model.layers
+    existing_inputs = {}
+    layer_dims = {}
+    for layer in layer_list:
+        layer_class = layer.__class__.__name__
+        if 'Input' in layer_class:
+            continue
+        if layer_class not in existing_inputs:
+            existing_inputs[layer_class] = {"input_dims": [], "dtype": [], "shape": []}
+            layer_dims[layer_class] = {"input_dims": [], "output_dims": []}
+        # NOTE(review): layer.input assumes a single-input layer; multi-input
+        # layers (e.g. Add/Concatenate) may expose a list -- confirm handling.
+        input_dims = len(layer.input.shape)
+        output_dims = len(layer.output.shape)
+        dtype = str(layer.input.dtype.name)
+        shape = str(list(layer.input.shape))
+        if input_dims not in existing_inputs[layer_class]['input_dims']:
+            existing_inputs[layer_class]['input_dims'].append(input_dims)
+        if input_dims not in layer_dims[layer_class]['input_dims']:
+            layer_dims[layer_class]['input_dims'].append(input_dims)
+        if output_dims not in layer_dims[layer_class]['output_dims']:
+            layer_dims[layer_class]['output_dims'].append(output_dims)
+        if dtype not in existing_inputs[layer_class]['dtype']:
+            existing_inputs[layer_class]['dtype'].append(dtype)
+        if shape not in existing_inputs[layer_class]['shape']:
+            existing_inputs[layer_class]['shape'].append(shape)
+    return existing_inputs, layer_dims
+
+
+def model_to_json(model_path, folder_path):
+    """Extract a model's structural info (edges, layer configs, input info,
+    dims) and dump it as a JSON file later consumed by the coverage
+    calculator. folder_path is where the JSON is written for .h5 models."""
+    cur_model = keras.models.load_model(model_path, custom_objects=custom_objects())
+    cur_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
+
+    existing_edges = extract_edges(cur_model)
+    existing_node = extract_nodes(cur_model)
+    existing_inputs, layer_dims = extract_inputs(cur_model)
+    layer_dict = {}
+    cur_edge_num = 0
+    # Count layers per class and the number of incoming edges of this model.
+    for layer in cur_model.layers:
+        layer_name = layer.__class__.__name__
+        if 'Input' in layer_name:
+            continue
+        layer_dict[layer_name] = layer_dict[layer_name] + 1 if layer_name in layer_dict else 1
+        inbound_nodes = layer._inbound_nodes
+        if inbound_nodes:
+            if isinstance(inbound_nodes[0].inbound_layers, list):
+                cur_edge_num += len(inbound_nodes[0].inbound_layers)
+            else:
+                if inbound_nodes:
+                    cur_edge_num += 1
+
+    layer_num = sum(layer_dict.values())
+    layer_type = list(layer_dict.keys())
+    print(f'layer_num:{layer_num}')
+    print(f'layer_type_num:{layer_type}')
+    print(f'cur_edge_num:{cur_edge_num}')
+    print(existing_edges)
+    print(existing_node)
+    print(existing_inputs)
+    print(layer_dims)
+
+    cur_model_info = {}
+    cur_model_info['edges'] = existing_edges
+    cur_model_info['layer_config'] = existing_node
+    cur_model_info['layer_input_info'] = existing_inputs
+    cur_model_info['layer_num'] = layer_num
+    cur_model_info['layer_type'] = layer_type
+    cur_model_info['cur_edge_num'] = cur_edge_num
+    cur_model_info['layer_dims'] = layer_dims
+    # NOTE(review): the "hdf5" branch yields "<name>.json.json", and the other
+    # branch splits on "\\" (Windows-only separator) -- confirm both are intended.
+    if 'svhn' in model_path or 'fashion2' in model_path:
+        json_path = model_path.replace("hdf5","json")+".json"
+    else:
+        json_path = os.path.join(folder_path, model_path.split("\\")[-1][:-3] + '.json')
+    # if not os.path.exists(json_path):
+    # Stringify any config value that json.dump could not serialize.
+    for op in cur_model_info['layer_config']:
+        for config in cur_model_info['layer_config'][op]:
+            for config_key, config_value in config.items():
+                if not (isinstance(config_value, int) or isinstance(config_value, str) or isinstance(config_value,
+                                                                                                     list) or isinstance(
+                    config_value, float) or isinstance(config_value, dict) or isinstance(config_value,
+                                                                                         set) or isinstance(
+                    config_value, tuple)):
+                    config[config_key] = str(config_value)
+
+    try:
+        with open(json_path, 'w') as json_file:
+            json.dump(cur_model_info, json_file, indent=4)
+    except Exception as e:
+        print("!!!!!!!")
+        print(cur_model_info)
+        raise e
+
+
def union_json(single_json_path, all_json_path):
    """Merge one model's info JSON into the accumulated all-layer JSON.

    single_json_path: path of a single model's info file.
    all_json_path: path of the accumulated ("denominator") file; it is
    created on the first call and rewritten in place afterwards.
    """
    with open(single_json_path, 'r') as fr:
        model_info = json.load(fr)
    if os.path.exists(all_json_path):
        with open(all_json_path, 'r') as fr:
            all_layer_info = json.load(fr)
    else:
        all_layer_info = {}

    # layer_config: keep every distinct config dict seen per layer class.
    merged_configs = all_layer_info.setdefault('layer_config', {})
    for layer_class, layer_configs in model_info['layer_config'].items():
        if layer_class not in merged_configs:
            merged_configs[layer_class] = layer_configs
        else:
            known = merged_configs[layer_class]
            for cfg in layer_configs:
                if cfg not in known:
                    known.append(cfg)

    # layer_input_info: set-union the observed input dims / dtypes / shapes.
    merged_inputs = all_layer_info.setdefault('layer_input_info', {})
    for layer_class, input_info in model_info['layer_input_info'].items():
        if layer_class not in merged_inputs:
            merged_inputs[layer_class] = input_info
        else:
            target = merged_inputs[layer_class]
            for attr in ("input_dims", "dtype", "shape"):
                if attr not in target:
                    target[attr] = input_info[attr]
                else:
                    target[attr] = list(set(input_info[attr]).union(set(target[attr])))

    # layer_dims: same union, for input/output tensor ranks.
    merged_dims = all_layer_info.setdefault('layer_dims', {})
    for layer_class, dims in model_info['layer_dims'].items():
        if layer_class not in merged_dims:
            merged_dims[layer_class] = dims
        else:
            target = merged_dims[layer_class]
            for attr in ("input_dims", "output_dims"):
                if attr not in target:
                    target[attr] = dims[attr]
                else:
                    target[attr] = list(set(dims[attr]).union(set(target[attr])))

    # layer_type: union of all layer class names ever seen.
    if 'layer_type' in all_layer_info:
        all_layer_info['layer_type'] = list(set(model_info['layer_type']).union(set(all_layer_info['layer_type'])))
    else:
        all_layer_info['layer_type'] = model_info['layer_type']

    # Track maxima of edge/layer counts over all merged models.
    if 'max_edge_num' in all_layer_info:
        all_layer_info['max_edge_num'] = max(all_layer_info['max_edge_num'], model_info['cur_edge_num'])
    else:
        all_layer_info['max_edge_num'] = model_info['cur_edge_num']
    if 'max_layer_num' in all_layer_info:
        all_layer_info['max_layer_num'] = max(all_layer_info['max_layer_num'], model_info['layer_num'])
    else:
        all_layer_info['max_layer_num'] = model_info['layer_num']

    with open(all_json_path, 'w') as fw:
        json.dump(all_layer_info, fw, indent=4)
+
+
class CoverageCalculatornew:
    """Coverage calculator for LEMON-generated models.

    Denominators ("total_*") are derived from the accumulated all-layer JSON
    and the API config pool; numerators come from a single model-info JSON
    loaded via load_json(). PARAMETER_SPACE (module-level constant) caps the
    count of free-valued parameters and of distinct input shapes per layer.
    """

    # __init__ only holds state that is independent of any concrete model.
    def __init__(self, all_json_path, api_config_pool_path):
        self.all_layer_info = {}
        self.edges = []
        self.all_edges = []
        self.layer_config = {}
        self.layer_input_info = {}
        self.POSSIBLE_DTYPE = {'bfloat16', 'double', 'float16', 'float32', 'float64', 'half'}

        with open(api_config_pool_path, "r") as pool_file:
            self.api_config_pool = json.load(pool_file)
        with open(all_json_path, 'r') as json_file:
            self.all_layer_info = json.load(json_file)

        # Input-coverage denominators: each layer class can be fed with
        # |POSSIBLE_DTYPE| dtypes, up to PARAMETER_SPACE shapes and the
        # ndims recorded in the accumulated json.
        self.total_dtype_num = len(self.all_layer_info["layer_input_info"]) * len(self.POSSIBLE_DTYPE)
        self.total_shape_num = len(self.all_layer_info["layer_input_info"]) * PARAMETER_SPACE
        self.total_ndims_num = 0
        for layer_class in self.all_layer_info["layer_input_info"]:
            ndims_list = self.all_layer_info["layer_input_info"][layer_class]["input_dims"]
            self.total_ndims_num += len(ndims_list)
        self.total_input_num = self.total_ndims_num + self.total_dtype_num + self.total_shape_num

        # Config-coverage denominator: a pool entry of [0] marks a "free"
        # parameter, counted as PARAMETER_SPACE possible values.
        self.total_param = {}
        self.total_param_num = 0
        for layer_class in self.api_config_pool:
            self.total_param[layer_class] = 0
            for config in self.api_config_pool[layer_class]:
                if self.api_config_pool[layer_class][config] == [0]:
                    self.total_param[layer_class] += PARAMETER_SPACE
                else:
                    self.total_param[layer_class] += len(self.api_config_pool[layer_class][config])
            self.total_param_num += self.total_param[layer_class]

        # All dimension-compatible layer pairs. BUG FIX: the original compared
        # the intersection *set* against 0 ("set != 0" is always True), so every
        # pair -- including dimension-incompatible ones -- was counted as a
        # possible edge, inflating the API-pair-coverage denominator. A pair is
        # compatible only when the intersection is non-empty.
        for pre_layer, next_layer in product(self.all_layer_info["layer_dims"].keys(), repeat=2):
            if set(self.all_layer_info["layer_dims"][pre_layer]["output_dims"]).intersection(
                    set(self.all_layer_info["layer_dims"][next_layer]["input_dims"])):
                self.all_edges.append([pre_layer, next_layer])

        self.max_edge_num = self.all_layer_info['max_edge_num']
        self.max_layer_num = self.all_layer_info['max_layer_num']
        self.layer_type = len(self.all_layer_info["layer_type"])

        # Per-model counters, filled by load_json().
        self.cur_edge_num = 0
        self.cur_layer_num = 0
        self.cur_layer_type = 0

    def load_json(self, json_path):
        """Load one model-info JSON and (re)set the per-model numerators."""
        with open(json_path, 'r') as json_file:
            model_info = json.load(json_file)

        self.cur_edge_num = model_info["cur_edge_num"]
        self.cur_layer_num = model_info['layer_num']
        self.cur_layer_type = len(model_info['layer_type'])
        self.edges = []
        self.layer_config = {}
        self.layer_input_info = {}

        for edge in model_info['edges']:
            if edge not in self.edges:
                self.edges.append(edge)

        # Keep only distinct config dicts per layer class.
        for class_type, configs in model_info["layer_config"].items():
            if class_type not in self.layer_config:
                self.layer_config[class_type] = configs
            else:
                for config in configs:
                    if config not in self.layer_config[class_type]:
                        self.layer_config[class_type].append(config)
        print(self.layer_config)  # debug output retained from the original

        for layer_class, layer_input_info in model_info['layer_input_info'].items():
            if layer_class not in self.layer_input_info:
                self.layer_input_info[layer_class] = layer_input_info
            else:
                for attr in ["input_dims", "dtype", "shape"]:
                    if attr not in self.layer_input_info[layer_class].keys():
                        self.layer_input_info[layer_class][attr] = layer_input_info[attr]
                    else:
                        self.layer_input_info[layer_class][attr] = list(
                            set(layer_input_info[attr]).union(
                                set(self.layer_input_info[layer_class][attr])))

    def api_pair_coverage(self):
        """Fraction of dimension-compatible layer pairs exercised as edges."""
        return len(self.edges) / len(self.all_edges)

    def _layer_config_coverage(self, layer_config_list, layer_class):
        """Count distinct covered parameter values for one layer class.

        Returns (hp, param_list): hp is the count of covered values;
        param_list maps each pool parameter to the values seen. Free
        parameters (pool entry [0]) are capped near PARAMETER_SPACE values.
        """
        config_pool = self.api_config_pool[layer_class]
        param_list = {}
        for param in config_pool:
            param_list[param] = []
        hp = 0
        # Journal Submitted Version is Below.
        for layer_config in layer_config_list:
            for param in layer_config:
                if param not in param_list:
                    continue
                if config_pool[param] == [0]:
                    if layer_config[param] not in param_list[param] and len(param_list[param]) <= PARAMETER_SPACE:
                        param_list[param].append(layer_config[param])
                        hp += 1
                else:
                    if layer_config[param] not in param_list[param]:
                        param_list[param].append(layer_config[param])
                        hp += 1
        return hp, param_list

    def config_coverage(self):
        """Fraction of all pool parameter values covered by the loaded model."""
        total_hp = 0
        for layer_class in self.layer_config:
            if layer_class in self.api_config_pool:
                layer_config_list = self.layer_config[layer_class]
                hp, param_list = self._layer_config_coverage(layer_config_list, layer_class)
                total_hp += hp
        return total_hp / self.total_param_num

    def ndims_coverage(self):
        """Number of distinct input ranks covered, summed over layer classes."""
        covered_ndims_num = 0
        for layer_class in self.layer_input_info:
            ndims_list = self.layer_input_info[layer_class]["input_dims"]
            covered_ndims_num += len(ndims_list)
        return covered_ndims_num

    def dtype_coverage(self):
        """Number of distinct input dtypes covered, summed over layer classes."""
        covered_dtype_num = 0
        for layer_class in self.layer_input_info:
            dtype_list = self.layer_input_info[layer_class]["dtype"]
            covered_dtype_num += len(dtype_list)
        return covered_dtype_num

    def shape_coverage(self):
        """Number of distinct input shapes covered, capped per layer class."""
        covered_shape_num = 0
        for layer_class in self.layer_input_info:
            shape_list = self.layer_input_info[layer_class]["shape"]
            covered_shape_num += min(len(shape_list),
                                     PARAMETER_SPACE)  # if the total number of shape is larger that SHAPE_SPACE, we set it as 100%
        return covered_shape_num

    def input_coverage(self):
        """
        input_cov = ndim_cov + dtype_cov + shape_cov
        Returns (input_cov, ndims_cov, dtype_cov, shape_cov) as fractions.
        """
        covered_ndims = self.ndims_coverage()
        covered_dtype = self.dtype_coverage()
        covered_shape = self.shape_coverage()
        print(f"The NDims Coverage Is: {covered_ndims}/{self.total_ndims_num}")
        print(f"The DType Coverage Is: {covered_dtype}/{self.total_dtype_num}")
        print(f"The Shape Coverage Is: {covered_shape}/{self.total_shape_num}")
        print(f"The Input Coverage Is: {covered_ndims + covered_dtype + covered_shape}/{self.total_input_num}")
        input_cov = (covered_ndims + covered_dtype + covered_shape) / self.total_input_num
        ndims_cov = covered_ndims / self.total_ndims_num
        dtype_cov = covered_dtype / self.total_dtype_num
        shape_cov = covered_shape / self.total_shape_num
        return input_cov, ndims_cov, dtype_cov, shape_cov

    def op_type_cover(self):
        """Fraction of known layer classes used by the loaded model."""
        print(f'op_type_cover is: {self.cur_layer_type}/{self.layer_type}')
        return self.cur_layer_type / self.layer_type

    def op_num_cover(self):
        """Loaded model's layer count relative to the largest model seen."""
        print(f'op_num_cover is: {self.cur_layer_num}/{self.max_layer_num}')
        return self.cur_layer_num / self.max_layer_num

    def edge_cover(self):
        """Loaded model's edge count relative to the largest model seen."""
        print(f'edge_cover is: {self.cur_edge_num}/{self.max_edge_num}')
        return self.cur_edge_num / self.max_edge_num

    def cal_coverage(self):
        """Compute all six coverage metrics for the model set via load_json()."""
        input_cov, ndims_cov, dtype_cov, shape_cov = self.input_coverage()
        config_cov = self.config_coverage()
        api_cov = self.api_pair_coverage()
        op_type_cov = self.op_type_cover()
        op_num_cov = self.op_num_cover()
        edge_cov = self.edge_cover()
        return input_cov, config_cov, api_cov, op_type_cov, op_num_cov, edge_cov
+
+# if __name__ == '__main__':
+#     model_path1 = 'data/mnist_output/000005/models/tensorflow.h5'
+#     model_path2 = 'data/mnist_output/000004/models/tensorflow.h5'
+#     all_json_path = os.path.join(folder_path, "all_layer_info.json")
+#     model_to_json(model_path1)
+#     model_to_json(model_path2)
+#     for file in os.listdir(folder_path):
+#         if file != 'all_layer_info.json':
+#             file_path = os.path.join(folder_path, file)
+#             union_json(file_path, all_json_path)
+#
+#     cal_cov = CoverageCalculator(all_json_path)
+#     cal_cov.load_json('model_json/000004.json')
+#     cal_cov.cal_coverage()

+ 143 - 0
server/LEMON-master/run/model_to_txt.py

@@ -0,0 +1,143 @@
+import os
+import keras
+import psutil
+import configparser
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+import scripts.tools.utils as tools_utils
+import argparse
+import json
+class LossHistory(keras.callbacks.Callback):
+    # Keras callback recording per-batch loss, accuracy and process memory.
+    # The collected lists are read by save_log_txt() after model.fit().
+    def on_train_begin(self, logs=None):
+        # Reset the buffers at the start of every fit() call.
+        self.losses = []  # per-batch training loss
+        self.acc = []  # per-batch 'acc' metric (entries may be None)
+        self.mem_info_list = []  # process resident memory, in GiB
+
+    def on_batch_end(self, batch, logs=None):
+        process = psutil.Process()
+        self.losses.append(logs.get('loss'))
+        # NOTE(review): newer Keras reports 'accuracy' instead of 'acc';
+        # if so this fills with None -- confirm the pinned Keras version.
+        self.acc.append(logs.get('acc'))
+        self.mem_info_list.append(process.memory_info().rss / (1024**3))
+
+def save_log_txt(model, path, name, bk, x_train, y_train):
+    """Fit `model` for 2 epochs, then append averaged loss/accuracy/memory
+    stats as one record to the JSON list stored at `path`.
+
+    `name` is expected to look like ".../<model>_<method>_...": the first two
+    "_"-separated fields of the basename become the record's keys.
+    `bk` (backend name) is accepted but not used in this function.
+    """
+    history_loss = LossHistory()
+    model.fit(x_train, y_train, epochs=2, batch_size=1024, validation_split=0.2, verbose=1, callbacks=[history_loss])
+
+    model_name = name.split("/")[-1].split("_")[0]
+    method_name = name.split("/")[-1].split("_")[1]
+    # NOTE(review): if Keras reports the metric key as 'accuracy' rather than
+    # 'acc', valid_acc is empty and the average below raises
+    # ZeroDivisionError -- confirm against the pinned Keras version.
+    valid_acc = [acc for acc in history_loss.acc if acc is not None]
+    result_data = {
+        "model": model_name,
+        "method": method_name,
+        "result": {
+            "Losses": sum(history_loss.losses)/len(history_loss.losses),
+            "Accuracy": sum(valid_acc)/len(valid_acc),
+            "MemoryInfoList": sum(history_loss.mem_info_list)/len(history_loss.mem_info_list)
+        }
+    }
+
+    # Append to the existing report list (file is created on first call).
+    if os.path.exists(path):
+        with open(path, 'r') as json_file:
+            data = json.load(json_file)
+    else:
+        data = []
+    data.append(result_data)
+    
+    with open(path, 'w') as json_file:
+        json.dump(data, json_file, indent=4)
+        
+
def custom_objects():
    """Custom activations needed to deserialize mutated LEMON models."""
    def no_activation(x):
        # Identity activation inserted by some mutation operators.
        return x

    def leakyrelu(x):
        # LeakyReLU(alpha=0.01), registered under the name used at save time.
        import keras.backend as K
        return K.relu(x, alpha=0.01)

    mapping = {}
    mapping['no_activation'] = no_activation
    mapping['leakyrelu'] = leakyrelu
    return mapping
+
+def model_to_txt1(model_path, bk):
+    """Load a mutated model, recompile it, fetch its experiment dataset and
+    delegate training + statistics dumping to save_log_txt().
+
+    NOTE(review): paths are split on "\\" (Windows separator) and sliced with
+    magic offsets (-5 strips ".hdf5", -3 strips ".h5") -- confirm on POSIX.
+    """
+    cur_model = keras.models.load_model(model_path, custom_objects=custom_objects())
+    cur_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
+
+    data = tools_utils.DataUtils
+    
+    
+    # Report file sits next to the model dir, named "<prefix><backend>.json".
+    if 'svhn' in model_path or 'fashion2' in model_path:
+        txt_path = model_path.split("\\")[-1][:-5].split("mut_model")[0]+bk+".json"
+    else:
+        txt_path = model_path.split("\\")[-1][:-3].split("mut_model")[0]+bk+".json"
+    if 'svhn' in model_path or 'fashion2' in model_path:
+        model_path = model_path.split("\\")[-1][:-5]
+    else:
+        model_path = model_path.split("\\")[-1][:-3]
+    # The basename encodes which experiment dataset to load.
+    data_path = model_path.split("/")[-1]
+    x_test, y_test = data.get_data_by_exp(data_path)
+    save_log_txt(cur_model,txt_path,model_path,bk,x_test, y_test)
+
+
+if __name__ == "__main__":
+
+    """Parser of command args"""
+    parse = argparse.ArgumentParser()
+    parse.add_argument("--backend", type=str, help="name of backends")
+    parse.add_argument("--model_path", type=str, help="redis db port")
+    parse.add_argument("--root_dir", type=str, help="redis db port")
+    flags, unparsed = parse.parse_known_args(sys.argv[1:])
+
+    """Load Configuration"""
+    lemon_cfg = configparser.ConfigParser()
+    # lemon_cfg.read(f"./config/{flags.config_name}")
+    
+    conf_path = os.path.join(os.path.dirname(os.getcwd()), "config", "demo.conf")
+    lemon_cfg.read(conf_path)
+    parameters = lemon_cfg['parameters']
+    gpu_ids = parameters['gpu_ids']
+    gpu_list = parameters['gpu_ids'].split(",")
+
+    """Init cuda"""
+    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+    os.environ["CUDA_VISIBLE_DEVICES"] = gpu_ids
+
+    """Switch backend"""
+    bk_list = ['tensorflow', 'theano', 'cntk', 'mxnet']
+    bk = flags.backend
+    print('.........................',type(bk))
+    os.environ['KERAS_BACKEND'] = bk
+    os.environ['PYTHONHASHSEED'] = '0'
+
+    if bk == 'tensorflow':
+        os.environ["TF_CPP_MIN_LOG_LEVEL"] = '2'  # 只显示 warning 和 Error
+        import tensorflow as tf
+
+    if bk == 'theano':
+        if len(gpu_list) == 2:
+            os.environ[
+                'THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]};dev{gpu_list[1]}->cuda{gpu_list[1]}," \
+                                  f"force_device=True,floatX=float32,lib.cnmem=1"
+        else:
+            os.environ['THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]}," \
+                                         f"force_device=True,floatX=float32,lib.cnmem=1"
+        batch_size = 32
+        import theano as th
+
+        mylogger.info(th.__version__)
+    if bk == "cntk":
+        batch_size = 32
+        from cntk.device import try_set_default_device, gpu
+        try_set_default_device(gpu(int(gpu_list[0])))
+        import cntk as ck
+
+    if bk == "mxnet":
+        batch_size = 32
+        import mxnet as mxnet
+
+    from keras import backend as K
+    import keras
+    print("Using {} as backend for states extraction| {} is wanted".format(K.backend(),bk))
+    """Get model hidden output on selected_index data on specific backend"""
+    model_to_txt1(flags.model_path, bk)

+ 640 - 0
server/LEMON-master/run/mutate_lemon.py

@@ -0,0 +1,640 @@
+# -*-coding:UTF-8-*-
+import csv
+from itertools import *
+import keras
+import json
+import networkx as nx
+import sys
+
+# sys.path.append("../")
+import os
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+
+from scripts.logger.lemon_logger import Logger
+from scripts.tools.mutator_selection_logic import Roulette, MCMC
+from scripts.mutation.model_mutation_generators import *
+
+import argparse
+
+import ast
+
+import numpy as np
+from scripts.mutation.mutation_utils import *
+import pickle
+from scripts.tools import utils
+from scripts.tools.utils import ModelUtils
+import shutil
+import re
+import datetime
+import configparser
+import warnings
+import math
+
# Counter used by the (currently commented-out) coverage-reporting code below.
lines = 0
# np.random.seed(20200501)
warnings.filterwarnings("ignore")
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"  # suppress TF info-level log messages
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""  # hide GPUs: this driver process runs on CPU
import psutil
+
+
def partially_nan_or_inf(predictions, bk_num):
    """
    Return True if, for at least one input, a strict subset of the backends
    produced NaN/Inf values (some backends corrupted, but not all of them).

    :param predictions: one entry per backend; each entry is a sequence of
        per-input prediction arrays.
    :param bk_num: expected number of backends.
    :raises Exception: when len(predictions) does not match bk_num.
    """
    if len(predictions) != bk_num:
        raise Exception("wrong backend amounts")

    def contains_bad_values(nd):
        # an array is "bad" when it holds any NaN or Inf entry
        return bool(np.isnan(nd).any() or np.isinf(nd).any())

    # iterate input-by-input across all backends
    for per_input_preds in zip(*predictions):
        bad_count = sum(1 for nd in per_input_preds if contains_bad_values(nd))
        if 0 < bad_count < bk_num:
            return True
    return False
+
+
def get_selector_by_startegy_name(mutator_s, mutant_s):
    """
    Resolve strategy names to their selector classes.

    :param mutator_s: mutator-selection strategy name (only "MCMC" registered).
    :param mutant_s: mutant-selection strategy name (only "ROULETTE" registered).
    :return: tuple (mutator selector class, mutant selector class).
    :raises KeyError: if either strategy name is unknown.
    """
    mutator_lookup = {"MCMC": MCMC}
    mutant_lookup = {"ROULETTE": Roulette}
    return mutator_lookup[mutator_s], mutant_lookup[mutant_s]
+
+
def save_mutate_history(selector, invalid_history: dict, mutant_history: list):
    """
    Persist mutation statistics under the experiment directory (module-level
    global `experiment_dir`): a CSV with per-operator success/invalid/total
    counts, and a text file listing every accepted mutant name.
    """
    mutator_history_path = os.path.join(experiment_dir, "mutator_history.csv")
    mutant_history_path = os.path.join(experiment_dir, "mutant_history.txt")

    with open(mutator_history_path, "w+") as fw:
        fw.write("Name,Success,Invalid,Total\n")
        for op, invalid_cnt in invalid_history.items():
            op_stats = selector.mutators[op]
            fw.write(
                "{},{},{},{}\n".format(
                    op, op_stats.delta_bigger_than_zero, invalid_cnt, op_stats.total
                )
            )

    with open(mutant_history_path, "w+") as fw:
        for mutant in mutant_history:
            fw.write("{}\n".format(mutant))
+
+
def is_nan_or_inf(t):
    """Return True when the scalar ``t`` is NaN or (positive/negative) infinity."""
    return math.isnan(t) or math.isinf(t)
+
+
def continue_checker(**run_stat):
    """
    Decide whether the mutation loop should keep running.

    Two stop modes are supported:
      * "TIMING"  - continue while elapsed wall-clock minutes < time_limit.
      * "COUNTER" - continue while the accepted-mutant count < counters_limit.

    :raises Exception: on an unknown stop mode.
    """
    start_time = run_stat["start_time"]
    time_limitation = run_stat["time_limit"]
    cur_counters = run_stat["cur_counters"]
    counters_limit = run_stat["counters_limit"]
    s_mode = run_stat["stop_mode"]

    if s_mode == "TIMING":
        # elapsed wall-clock time, reported in whole minutes
        hours, minutes, seconds = utils.ToolUtils.get_HH_mm_ss(
            datetime.datetime.now() - start_time
        )
        total_minutes = hours * 60 + minutes
        mutate_logger.info(
            f"INFO: Mutation progress: {total_minutes}/{time_limitation} Minutes!"
        )
        return total_minutes < time_limitation

    if s_mode == "COUNTER":
        # budget exhausted: stop without logging progress
        if cur_counters >= counters_limit:
            return False
        mutate_logger.info(
            "INFO: Mutation progress {}/{}".format(cur_counters + 1, counters_limit)
        )
        return True

    raise Exception(f"Error! Stop Mode {s_mode} not Found!")
+
+
def calc_inner_div(model):
    """
    Measure the structural depth ratio of a Keras model: length of the longest
    path in the layer DAG divided by the total number of layers.

    :param model: Keras model whose layers expose `_inbound_nodes`.
    :return: float in (0, 1] for a non-empty model; 0.0 for a model with no
        layers (guards against ZeroDivisionError on degenerate inputs).
    """
    graph = nx.DiGraph()
    for layer in model.layers:
        graph.add_node(layer.name)
        # add an edge from every parent layer feeding into this layer
        # NOTE(review): in some Keras versions `inbound_layers` can be a single
        # layer object rather than a list — confirm against the pinned version
        for inbound_node in layer._inbound_nodes:
            if inbound_node.inbound_layers:
                for parent_layer in inbound_node.inbound_layers:
                    graph.add_edge(parent_layer.name, layer.name)
    if len(graph) == 0:
        # empty model: no meaningful path ratio, avoid division by zero
        return 0.0
    longest_path = nx.dag_longest_path(graph)
    return len(longest_path) / len(graph)
+
+
def _generate_and_predict(
    res_dict, filename, mutate_num, mutate_ops, test_size, exp, backends
):
    # Main driver of the LEMON mutation loop
    """
    Generate mutant models with the configured mutation operators, run each
    mutant on every backend, and keep mutants that amplify cross-backend
    inconsistency as new seeds.

    :param res_dict: metrics accumulator {metrics_name: {delta_key: delta}}.
    :param filename: path of the original (seed) model file.
    :param mutate_num: maximum number of mutants to generate.
    :param mutate_ops: list of mutation operator names.
    :param test_size: number of test inputs used for prediction.
    :param exp: experiment identifier, e.g. "lenet5-mnist".
    :param backends: list of backend names to compare.
    :return: the updated res_dict.
    """
    mutate_op_history = {k: 0 for k in mutate_ops}
    mutate_op_invalid_history = {k: 0 for k in mutate_ops}
    mutant_history = []
    # get mutator selection strategy
    # svhn / fashion2 models are stored as .hdf5, the rest as .h5
    if "svhn" in exp or "fashion2" in exp:
        origin_model_name = "{}_origin0.hdf5".format(exp)
    else:
        origin_model_name = "{}_origin0.h5".format(exp)
    # The seed pool Ms initially contains only this origin model
    root_dir = os.path.dirname(os.getcwd())

    origin_save_path = os.path.join(mut_dir, origin_model_name)
    mutator_selector_func, mutant_selector_func = get_selector_by_startegy_name(
        mutator_strategy, mutant_strategy
    )
    # [origin_model_name] means seed pool only contains initial model at beginning.
    mutator_selector, mutant_selector = mutator_selector_func(
        mutate_ops
    ), mutant_selector_func([origin_model_name], capacity=mutate_num + 1)
    # MCMC,Roulette
    shutil.copy(src=filename, dst=origin_save_path)
    origin_model_status, res_dict, accumulative_inconsistency, _ = get_model_prediction(
        res_dict, origin_save_path, origin_model_name, exp, test_size, backends
    )

    if not origin_model_status:
        mutate_logger.error(
            f"Origin model {exp} crashed on some backends! LEMON would skip it"
        )
        sys.exit(-1)

    last_used_mutator = None
    last_inconsistency = accumulative_inconsistency  # ACC of the current seed
    mutant_counter = 0

    start_time = datetime.datetime.now()
    order_inconsistency_dict = {}  # NOTE(review): never used below — candidate for removal
    run_stat = {
        "start_time": start_time,
        "time_limit": time_limit,
        "cur_counters": mutant_counter,
        "counters_limit": mutate_num,
        "stop_mode": stop_mode,
    }

    # Keep looping while the stop condition (time or counter budget) allows
    while continue_checker(**run_stat):
        global model_num
        if model_num == mutate_num:
            break
        picked_seed = utils.ToolUtils.select_mutant(
            mutant_selector
        )  # roulette-wheel seed-model selection (pseudo-code lines 3-14)
        selected_op = utils.ToolUtils.select_mutator(
            mutator_selector, last_used_mutator=last_used_mutator
        )  # MCMC mutation-operator selection (pseudo-code lines 15-20)
        mutate_op_history[selected_op] += 1
        last_used_mutator = selected_op
        mutator = mutator_selector.mutators[selected_op]  # mutation operator object
        mutant = mutant_selector.mutants[picked_seed]  # seed model object

        if "svhn" in picked_seed or "fashion2" in picked_seed:
            new_seed_name = "{}-{}{}.hdf5".format(
                picked_seed[:-5], selected_op, mutate_op_history[selected_op]
            )

        else:
            new_seed_name = "{}-{}{}.h5".format(
                picked_seed[:-3], selected_op, mutate_op_history[selected_op]
            )  # name of the new mutant model
        # seed name would not be duplicate
        if new_seed_name not in mutant_selector.mutants.keys():
            # pseudo-code line 22: mutants are named after seed + operator, so a
            # duplicate name means this exact mutant was already generated
            new_seed_path = os.path.join(mut_dir, new_seed_name)
            picked_seed_path = os.path.join(mut_dir, picked_seed)
            mutate_st = datetime.datetime.now()

            model_mutation_generators = (
                root_dir + "/scripts/mutation/model_mutation_generators.py"
            )
            mutate_status = os.system(
                "{}/lemon/bin/python -u {} --model {} "
                "--mutate_op {} --save_path {} --mutate_ratio {}".format(
                    python_prefix,
                    model_mutation_generators,
                    picked_seed_path,
                    selected_op,
                    new_seed_path,
                    flags.mutate_ratio,
                )
            )
            # apply the mutation operator in a subprocess (pseudo-code line 21)

            mutate_et = datetime.datetime.now()
            mutate_dt = mutate_et - mutate_st
            h, m, s = utils.ToolUtils.get_HH_mm_ss(mutate_dt)
            mutate_logger.info(
                "INFO:Mutate Time Used on {} : {}h, {}m, {}s".format(
                    selected_op, h, m, s
                )
            )
            # mutation status code is successful

            if mutate_status == 0:  # the mutation subprocess finished successfully
                mutant.selected += 1
                mutator.total += 1
                # execute this model on all platforms
                predict_status, res_dict, accumulative_inconsistency, model_outputs = (
                    get_model_prediction(
                        res_dict, new_seed_path, new_seed_name, exp, test_size, backends
                    )
                )
                # compute ACC(m)

                if predict_status:
                    mutant_history.append(new_seed_name)
                    # pseudo-code lines 23-25
                    print("type:", type(model_outputs))
                    print("model_outputs:", model_outputs)

                    if utils.ModelUtils.is_valid_model(
                        inputs_backends=model_outputs, backends_nums=len(backends)
                    ):

                        delta = (
                            accumulative_inconsistency - last_inconsistency
                        )  # i.e. ACC(m) - ACC(s)
                        # NOTE(review): the two ifs below always match, since only
                        # MCMC and ROULETTE are registered as strategies

                        if mutator_strategy == "MCMC":
                            mutator.delta_bigger_than_zero = (
                                mutator.delta_bigger_than_zero + 1
                                if delta > 0
                                else mutator.delta_bigger_than_zero
                            )

                        if mutant_strategy == "ROULETTE" and delta > 0:
                            # when size >= capacity:
                            # random_mutant & Roulette would drop one and add new one
                            if mutant_selector.is_full():
                                mutant_selector.pop_one_mutant()
                            mutant_selector.add_mutant(
                                new_seed_name
                            )  # mutant amplified inconsistency (ACC(m) >= ACC(s)): add it to the seed pool
                            last_inconsistency = accumulative_inconsistency  # pseudo-code line 29

                        mutate_logger.info(
                            "SUCCESS:{} pass testing!".format(new_seed_name)
                        )
                        mutant_counter += 1
                    else:
                        mutate_op_invalid_history[selected_op] += 1
                        mutate_logger.error("Invalid model Found!")
                else:
                    mutate_logger.error("Crashed or NaN model Found!")
            else:
                mutate_logger.error(
                    "Exception raised when mutate {} with {}".format(
                        picked_seed, selected_op
                    )
                )

            mutate_logger.info("Mutated op used history:")
            mutate_logger.info(mutate_op_history)

            mutate_logger.info("Invalid mutant generated history:")
            mutate_logger.info(mutate_op_invalid_history)

        run_stat["cur_counters"] = mutant_counter

    save_mutate_history(mutator_selector, mutate_op_invalid_history, mutant_history)

    # calc_cov = CoverageCalculatornew(all_json_path, api_config_pool_path)
    # lines = 0
    # for file in os.listdir(folder_path):
    #     if file == 'total.json': continue
    #     file_path = os.path.join(folder_path, file)
    #     calc_cov.load_json(file_path)
    #     with open(file_path, 'r') as sub_json:
    #         sub_info = json.load(sub_json)
    #     outer_div = len(tar_set - set(sub_info['layer_type']))
    #     input_cov, config_cov, api_cov, op_type_cov, op_num_cov, edge_cov = calc_cov.cal_coverage()
    #     with open(output_path, 'a+', newline='') as fi:
    #         writer = csv.writer(fi)
    #         head = ['Layer Input Coverage', 'Layer Parameter Diversity', 'Layer Sequence Diversity',
    #                 'Operator Type Coverage', 'Operator Num Coverage', 'Edge Coverage', 'Accumulative inconsistency']
    #         if not lines:
    #             writer.writerow(head)
    #         lines += 1
    #         printlist = [input_cov, config_cov, api_cov, op_type_cov, op_num_cov, edge_cov,
    #                      acc[lines]]
    #         writer.writerow(printlist)

    return res_dict
+
+
def generate_metrics_result(res_dict, predict_output, model_idntfr):
    """
    Compute the configured inconsistency metrics (e.g. D_MAD) for every pair
    of backends and accumulate per-input deltas into res_dict.

    :param res_dict: {metrics_name: {delta_key: delta}}, updated in place.
    :param predict_output: {backend_name: predictions}.
    :param model_idntfr: model identifier used to build delta keys.
    :return: (res_dict, accumulative_incons) where accumulative_incons is the
        sum of all per-input deltas over all backend pairs (the ACC value).
    """
    mutate_logger.info("Generating Metrics Result")
    accumulative_incons = 0
    backends_pairs_num = 0
    # Compare results pair by pair
    for (bk_name1, prediction1), (bk_name2, prediction2) in combinations(
        predict_output.items(), 2
    ):
        backends_pairs_num += 1
        bk_pair = "{}_{}".format(bk_name1, bk_name2)
        for metrics_name, metrics_result_dict in res_dict.items():
            metrics_func = utils.MetricsUtils.get_metrics_by_name(metrics_name)
            # one delta per test input (list of length test_size)
            metrics_results = metrics_func(
                prediction1, prediction2, y_test[: flags.test_size]
            )
            # ACC is the sum of all inputs under all backend pairs
            accumulative_incons += sum(metrics_results)
            for input_idx, delta in enumerate(metrics_results):
                delta_key = "{}_{}_{}_input{}".format(
                    model_idntfr, bk_name1, bk_name2, input_idx
                )
                metrics_result_dict[delta_key] = delta

    mutate_logger.info(f"Accumulative Inconsistency: {accumulative_incons}")
    return res_dict, accumulative_incons
+
+
def generate_gini_result(predict_output, backends):
    """
    Mean Gini impurity of each backend's predictions.

    Backends missing from predict_output keep the default value 0.
    """
    gini_res = dict.fromkeys(backends, 0)
    for bk_name, prediction in predict_output.items():
        gini_res[bk_name] = utils.MetricsUtils.get_gini_mean(prediction)
    return gini_res
+
+
def generate_theta(predict_output, backends):
    """
    Mean theta value of each backend's predictions against the ground-truth
    labels (module-level y_test, truncated to flags.test_size).

    Backends missing from predict_output keep the default value 0.
    """
    theta_res = dict.fromkeys(backends, 0)
    for bk_name, prediction in predict_output.items():
        theta_res[bk_name] = utils.MetricsUtils.get_theta_mean(
            prediction, y_test[: flags.test_size]
        )
    return theta_res
+
+
# NOTE(review): SHAPE_SPACE is not referenced in this file — its semantics are
# defined at the use site elsewhere; confirm before changing.
SHAPE_SPACE = 5
# Global count of generated mutants; checked against --mutate_num in the loop.
model_num = 0
+
+
def get_model_prediction(res_dict, model_path, model_name, exp, test_size, backends):
    """
    Run the model on every backend (each in its own interpreter/virtualenv via
    os.system) and compute pairwise inconsistency metrics on the predictions.

    :param res_dict: metrics accumulator {metrics_name: {delta_key: delta}},
        updated in place.
    :param model_path: path of the model file to evaluate.
    :param model_name: file name of the model (".h5" or ".hdf5" extension).
    :param exp: experiment identifier, e.g. "lenet5-mnist".
    :param test_size: number of test inputs to predict on.
    :param backends: list of backend names.
    :return: (status, res_dict, accumulative_incons, predict_output);
        status is True only when every backend ran and results are finite.
        Crashed models are moved to crash_dir, NaN models to nan_dir.
    """
    root_dir = model_path.split("origin_model")[0]

    # Path where each backend subprocess dumps its predictions.
    # Must match the path used in patch_prediction_extractor.py.
    npy_path = root_dir + "res.npy"

    predict_output = {b: [] for b in backends}
    # Strip the extension robustly: the previous `model_name[:-3]` only handled
    # ".h5" and mangled ".hdf5" names (e.g. "svhn_origin0.hdf5" -> "svhn_origin0.hd").
    model_idntfr = os.path.splitext(model_name)[0]
    all_backends_predict_status = True
    for bk in backends:
        python_bin = f"{python_prefix}/{bk}/bin/python"
        predict_st = datetime.datetime.now()
        # run prediction with this backend in its dedicated environment
        pre_status_bk = os.system(
            f"{python_bin} -u -m patch_prediction_extractor --backend {bk} "
            f"--exp {exp} --test_size {test_size} --model {model_path} "
            f"--config_name {flags.config_name}"
        )

        predict_et = datetime.datetime.now()
        predict_td = predict_et - predict_st
        h, m, s = utils.ToolUtils.get_HH_mm_ss(predict_td)
        mutate_logger.info(
            "Prediction Time Used on {} : {}h, {}m, {}s".format(bk, h, m, s)
        )

        # If no exception is thrown, load and keep the prediction result
        if pre_status_bk == 0:
            data = np.load(npy_path)
            predict_output[bk] = data
        # record the crashed backend
        else:
            all_backends_predict_status = False
            mutate_logger.error(
                "{} crash on backend {} when predicting ".format(model_name, bk)
            )

    status = False
    accumulative_incons = None

    # every backend produced a result: compute metrics and screen for NaN/Inf
    if all_backends_predict_status:
        predictions = list(predict_output.values())
        res_dict, accumulative_incons = generate_metrics_result(
            res_dict=res_dict, predict_output=predict_output, model_idntfr=model_idntfr
        )
        # `accumulative_incons` being NaN/Inf means some result contains NaN/Inf
        if is_nan_or_inf(accumulative_incons):
            # NaN on only a subset of backends --> a genuine NaN bug
            if partially_nan_or_inf(predictions, len(backends)):
                nan_model_path = os.path.join(nan_dir, f"{model_idntfr}_NaN_bug.h5")
                mutate_logger.error("Error: Found one NaN bug. move NAN model")
            # NaN on all backends --> not a NaN bug
            else:
                nan_model_path = os.path.join(
                    nan_dir, f"{model_idntfr}_NaN_on_all_backends.h5"
                )
                mutate_logger.error(
                    "Error: Found one NaN Model on all libraries. move NAN model"
                )
            shutil.move(model_path, nan_model_path)

        else:  # No NaN or INF on any backend
            print(model_path)
            # export per-backend model structure/layer info
            for bk in backends:
                python_bin = f"{python_prefix}/{bk}/bin/python"
                os.system(
                    f"{python_bin} -u -m model_to_txt --backend {bk} --model_path {model_path} --root_dir {root_dir}"
                )
            mutate_logger.info("Saving prediction")
            with open(
                "{}/prediction_{}.pkl".format(inner_output_dir, model_idntfr), "wb+"
            ) as f:
                pickle.dump(predict_output, file=f)
            status = True

    # at least one backend crashed: quarantine the model
    else:
        mutate_logger.error("Error: move crash model")
        crash_model_path = os.path.join(crash_dir, model_name)
        shutil.move(model_path, crash_model_path)

    return status, res_dict, accumulative_incons, predict_output
+
+
if __name__ == "__main__":

    starttime = datetime.datetime.now()
    """
    
    Parser of command args. 
    It could make mutate_lemon.py run independently without relying on mutation_executor.py
    """
    parse = argparse.ArgumentParser()
    parse.add_argument(
        "--is_mutate",
        type=ast.literal_eval,
        default=False,
        help="parameter to determine mutation option",
    )
    parse.add_argument(
        "--mutate_op",
        type=str,
        nargs="+",
        choices=[
            "WS",
            "GF",
            "NEB",
            "NAI",
            "NS",
            "ARem",
            "ARep",
            "LA",
            "LC",
            "LR",
            "LS",
            "MLA",
        ],
        help="parameter to determine mutation option",
    )
    parse.add_argument(
        "--model", type=str, help="relative path of model file(from root dir)"
    )
    parse.add_argument(
        "--output_dir", type=str, help="relative path of output dir(from root dir)"
    )
    parse.add_argument("--backends", type=str, nargs="+", help="list of backends")
    parse.add_argument(
        "--mutate_num",
        type=int,
        help="number of variant models generated by each mutation operator",
    )
    parse.add_argument("--mutate_ratio", type=float, help="ratio of mutation")
    parse.add_argument("--exp", type=str, help="experiments identifiers")
    parse.add_argument("--test_size", type=int, help="amount of testing image")
    parse.add_argument("--config_name", type=str, help="config name")
    flags, unparsed = parse.parse_known_args(sys.argv[1:])
    warnings.filterwarnings("ignore")
    lemon_cfg = configparser.ConfigParser()
    # lemon_cfg.read(f".\config\{flags.config_name}")
    # config files live in <repo-root>/config, one level above this run/ dir
    cfg_path = os.path.join(os.path.dirname(os.getcwd()), "config", flags.config_name)
    lemon_cfg.read(cfg_path)
    # lemon_cfg.read(f"config/demo.conf")
    time_limit = lemon_cfg["parameters"].getint("time_limit")
    mutator_strategy = lemon_cfg["parameters"].get("mutator_strategy").upper()
    mutant_strategy = lemon_cfg["parameters"].get("mutant_strategy").upper()
    stop_mode = lemon_cfg["parameters"].get("stop_mode").upper()
    alpha = lemon_cfg["parameters"].getfloat("alpha")

    mutate_logger = Logger()
    #    pool = redis.ConnectionPool(host=lemon_cfg['redis']['host'], port=lemon_cfg['redis']['port'],
    #                                db=lemon_cfg['redis'].getint('redis_db'))
    #    redis_conn = redis.Redis(connection_pool=pool)

    # for k in redis_conn.keys():
    #     if flags.exp in k.decode("utf-8"):
    #         redis_conn.delete(k)

    # exp : like lenet5-mnist
    # These directories are module-level globals consumed by the functions above.
    experiment_dir = os.path.join(flags.output_dir, flags.exp)
    mut_dir = os.path.join(experiment_dir, "mut_model")
    crash_dir = os.path.join(experiment_dir, "crash")
    nan_dir = os.path.join(experiment_dir, "nan")
    inner_output_dir = os.path.join(experiment_dir, "inner_output")
    metrics_result_dir = os.path.join(experiment_dir, "metrics_result")

    x, y = utils.DataUtils.get_data_by_exp(flags.exp)  # load the dataset configured for this experiment
    x_test, y_test = x[: flags.test_size], y[: flags.test_size]
    pool_size = lemon_cfg["parameters"].getint("pool_size")
    python_prefix = lemon_cfg["parameters"]["python_prefix"].rstrip("\\")
    try:  # run the mutation algorithm
        metrics_list = lemon_cfg["parameters"]["metrics"].split(" ")  # D_MAD
        lemon_results = {k: dict() for k in metrics_list}
        lemon_results = _generate_and_predict(
            lemon_results,
            flags.model,
            flags.mutate_num,
            flags.mutate_op,
            flags.test_size,
            flags.exp,
            flags.backends,
        )
        with open(
            "{}/{}_lemon_results.pkl".format(experiment_dir, flags.exp), "wb+"
        ) as f:
            pickle.dump(lemon_results, file=f)
        utils.MetricsUtils.generate_result_by_metrics(
            metrics_list, lemon_results, metrics_result_dir, flags.exp
        )

    except Exception as e:
        # swallow so the timing summary below is always emitted
        mutate_logger.exception(sys.exc_info())

    from keras import backend as K

    K.clear_session()

    endtime = datetime.datetime.now()
    time_delta = endtime - starttime
    h, m, s = utils.ToolUtils.get_HH_mm_ss(time_delta)
    mutate_logger.info(
        "Mutation process is done: Time used: {} hour,{} min,{} sec".format(h, m, s)
    )

+ 146 - 0
server/LEMON-master/run/mutation_executor.py

@@ -0,0 +1,146 @@
# -*-coding:UTF-8-*-
import argparse
import sys
import os

sys.path.append("../")
from scripts.logger.lemon_logger import Logger
import warnings
import datetime
import configparser
from scripts.tools import utils

"""Init cuda"""
warnings.filterwarnings("ignore")
os.environ["KERAS_BACKEND"] = "tensorflow"
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"  # only show warnings and errors
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""  # hide GPUs: the driver runs on CPU
"""Setting random seed"""
+
if __name__ == "__main__":

    # Default config file name; overridden by --config_name when provided.
    # config_name = sys.argv[1]
    config_name = "demo.conf"

    parse = argparse.ArgumentParser()
    parse.add_argument(
        "--exp",
        type=str,
        choices=[
            "lexnet-cifar10",
            "lenet5-fashion-mnist",
            "fashion2",
            "svhn",
            "lenet5-mnist",
            "alexnet-cifar10",
            "mobilenet.1.00.224-imagenet",
            "vgg16-imagenet",
        ],
        help="experiments identifiers",
    )
    parse.add_argument("--root_dir", type=str, help="root dir")
    parse.add_argument(
        "--output_dir", type=str, help="relative path of output dir(from root dir)"
    )
    parse.add_argument(
        "--mutate_num",
        type=int,
        help="number of variant models generated by each mutation operator",
    )
    parse.add_argument("--config_name", type=str, help="config name,like demo.conf")
    flags_input, unparsed = parse.parse_known_args(sys.argv[1:])

    # Fix: honour the CLI-provided config name everywhere. Previously the
    # config was read from flags_input.config_name while the hard-coded
    # "demo.conf" was forwarded to the mutate_lemon subprocess, so parent and
    # child could silently run with different configurations.
    if flags_input.config_name:
        config_name = flags_input.config_name

    lemon_cfg = configparser.ConfigParser()
    cfg_path = os.path.join(flags_input.root_dir, "config", config_name)
    lemon_cfg.read(cfg_path)
    parameters = lemon_cfg["parameters"]
    # Collect CLI arguments and config values in a single namespace.
    flags = argparse.Namespace(
        mutate_ops=parameters["mutate_ops"],
        exps=flags_input.exp.lstrip().rstrip().split(" "),
        origin_model_dir=os.path.join(
            flags_input.root_dir, parameters["origin_model_dir"]
        ),
        output_dir=flags_input.output_dir,
        backend=parameters["backend"],
        mutate_num=flags_input.mutate_num,
        mutate_ratio=parameters.getfloat("mutate_ratio"),
        test_size=parameters.getint("test_size"),
        threshold=parameters.getfloat("threshold"),
        redis_db=lemon_cfg["parameters"].getint("redis_db"),
        python_prefix=parameters["python_prefix"].rstrip("/"),
    )

    if not os.path.exists(flags.output_dir):
        os.makedirs(flags.output_dir)

    main_log = Logger()

    """Lemon process"""
    main_log.logger.info("Success: Lemon start successfully!")
    start_time = datetime.datetime.now()
    for exp_identifier in flags.exps:

        """Make directory"""
        # layout: <output_dir>/<exp>/{mut_model,crash,nan,inner_output,metrics_result}
        experiment_dir = os.path.join(
            flags.output_dir, exp_identifier
        )  # exp : like lenet5-mnist
        mut_dir = os.path.join(experiment_dir, "mut_model")
        crash_dir = os.path.join(experiment_dir, "crash")
        nan_dir = os.path.join(experiment_dir, "nan")
        inner_output_dir = os.path.join(experiment_dir, "inner_output")
        metrics_result_dir = os.path.join(experiment_dir, "metrics_result")
        print(experiment_dir)
        for directory in (
            experiment_dir,
            mut_dir,
            crash_dir,
            nan_dir,
            inner_output_dir,
            metrics_result_dir,
        ):
            if not os.path.exists(directory):
                os.makedirs(directory)

        try:
            """Mutate and get output of different backends"""
            main_log.info("INFO:Lemon mutation starting!")
            main_log.info("INFO:Lemon for exp: {}".format(exp_identifier))
            if "svhn" in exp_identifier or "fashion2" in exp_identifier:
                origin_model_name = "{}_origin.hdf5".format(exp_identifier)
            else:
                origin_model_name = "{}_origin.h5".format(exp_identifier)
            origin_model_file = os.path.join(flags.origin_model_dir, origin_model_name)
            # delegate the actual mutation loop to mutate_lemon.py in the
            # "lemon" virtualenv, forwarding the resolved config name
            mutate_lemon = (
                "{}/lemon/bin/python -u -m mutate_lemon --mutate_op {} --model {} --output_dir {}"
                " --backends {} --mutate_num {} --mutate_ratio {} --exp {} --test_size {} --redis_db {} --config_name {}".format(
                    flags.python_prefix,
                    flags.mutate_ops,
                    origin_model_file,
                    flags.output_dir,
                    flags.backend,
                    flags.mutate_num,
                    flags.mutate_ratio,
                    exp_identifier,
                    flags.test_size,
                    flags.redis_db,
                    config_name,
                )
            )
            os.system(mutate_lemon)
            print(mutate_lemon)

        except Exception:
            main_log.error("Error: Lemon for exp:{} Failed!".format(exp_identifier))

            main_log.exception(sys.exc_info())

    end_time = datetime.datetime.now()
    time_delta = end_time - start_time
    h, m, s = utils.ToolUtils.get_HH_mm_ss(time_delta)
    main_log.info(
        "INFO:Lemon is done: Time used: {} hour,{} min,{} sec".format(h, m, s)
    )

+ 123 - 0
server/LEMON-master/run/patch_hidden_output_extractor.py

@@ -0,0 +1,123 @@
+# -*-coding:UTF-8-*-
+"""
+# Part  of localization phase
+# get prediction for each backend
+"""
+import sys
+sys.path.append("../")
+import os
+import pickle
+import argparse
+from scripts.tools.utils import DataUtils,ModelUtils
+from scripts.logger.lemon_logger import Logger
+import configparser
+import warnings
+import traceback
+import numpy as np
+
+#np.random.seed(20200501)
+warnings.filterwarnings("ignore")
+
+
+def _get_hidden_output(test_data,backend,select_model,model_dir,data_index):
+    """Dump per-layer outputs of one model on one selected input to disk.
+
+    Loads the mutant model named ``select_model`` from ``model_dir``
+    (models whose name contains 'svhn'/'fashion2' are stored as .hdf5,
+    all others as .h5), runs the single sample ``test_data[data_index]``
+    through it, and pickles the list of per-layer output ndarrays to
+    ``localize_output_dir/<select_model>_<backend>_<data_index>``.
+
+    NOTE(review): despite the old docstring, the result is written to a
+    file under the module-level ``localize_output_dir`` -- not to redis.
+    Relies on module globals ``keras`` and ``localize_output_dir`` being
+    initialized by the __main__ section before this is called.
+    """
+    if 'svhn' in select_model or 'fashion2' in select_model:
+        model_pathname = os.path.join(model_dir, "{}.hdf5".format(select_model))
+    else:
+        model_pathname = os.path.join(model_dir, "{}.h5".format(select_model))
+    # custom_objects supplies the non-standard activations used by mutants.
+    model = keras.models.load_model(model_pathname,custom_objects=ModelUtils.custom_objects())
+
+    # Identifier of the form "<model>_<backend>_<data_index>".
+    model_idntfr_backend = "{}_{}_{}".format(select_model, backend, data_index)
+    select_data = np.expand_dims(test_data[data_index], axis=0)
+    layers_output = ModelUtils.layers_output(model, select_data)
+    with open(os.path.join(localize_output_dir,model_idntfr_backend),"wb") as fw:
+        pickle.dump(layers_output,fw)
+
+
+if __name__ == "__main__":
+
+    """Parser of command args"""
+    parse = argparse.ArgumentParser()
+    parse.add_argument("--backend", type=str, help="name of backends")
+    parse.add_argument("--exp", type=str, help="experiments identifiers")
+    parse.add_argument("--output_dir", type=str, help="relative path of output dir(from root dir)")
+    parse.add_argument("--data_index", type=int, help="redis db port")
+    parse.add_argument("--config_name", type=str, help="config name")
+    parse.add_argument("--model_idntfr", type=str, help="redis db port")
+    flags, unparsed = parse.parse_known_args(sys.argv[1:])
+    mylogger = Logger()
+
+    """Load Configuration"""
+    warnings.filterwarnings("ignore")
+    lemon_cfg = configparser.ConfigParser()
+    # Resolve the config file relative to the project root (parent of the
+    # working dir, i.e. run/ -> LEMON-master/config).  Honor the
+    # --config_name flag when it is given -- previously demo.conf was
+    # hard-coded, silently ignoring --config_name and diverging from
+    # patch_prediction_extractor.py, which does read the requested config.
+    config_name = flags.config_name if flags.config_name else "demo.conf"
+    conf_path = os.path.join(os.path.dirname(os.getcwd()), "config", config_name)
+    lemon_cfg.read(conf_path)
+    parameters = lemon_cfg['parameters']
+    gpu_ids = parameters['gpu_ids']
+    gpu_list = parameters['gpu_ids'].split(",")
+
+    """Init cuda"""
+    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+    os.environ["CUDA_VISIBLE_DEVICES"] = gpu_ids
+    warnings.filterwarnings("ignore")
+
+    batch_size = 64
+    """Switch backend"""
+    # Backend identifiers handled below (informational; not validated).
+    bk_list = ['tensorflow', 'theano', 'cntk', 'mxnet']
+    bk = flags.backend
+    print('.........................',type(bk))
+    # Keras picks its backend from this env var at import time, so it must
+    # be set before `import keras` at the bottom of this section.
+    os.environ['KERAS_BACKEND'] = bk
+    os.environ['PYTHONHASHSEED'] = '0'
+
+    if bk == 'tensorflow':
+        os.environ["TF_CPP_MIN_LOG_LEVEL"] = '2'  # only show warnings and errors
+        import tensorflow as tf
+        mylogger.info(tf.__version__)
+
+    if bk == 'theano':
+        # Map one or two visible GPUs into THEANO_FLAGS device contexts;
+        # flags must be set before `import theano`.
+        if len(gpu_list) == 2:
+            os.environ[
+                'THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]};dev{gpu_list[1]}->cuda{gpu_list[1]}," \
+                                  f"force_device=True,floatX=float32,lib.cnmem=1"
+        else:
+            os.environ['THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]}," \
+                                         f"force_device=True,floatX=float32,lib.cnmem=1"
+        batch_size = 32
+        import theano as th
+
+        mylogger.info(th.__version__)
+    if bk == "cntk":
+        batch_size = 32
+        from cntk.device import try_set_default_device, gpu
+        try_set_default_device(gpu(int(gpu_list[0])))
+        import cntk as ck
+        mylogger.info(ck.__version__)
+
+    if bk == "mxnet":
+        batch_size = 32
+        import mxnet as mxnet
+        mylogger.info(mxnet.__version__)
+
+    # Import keras only after KERAS_BACKEND and backend env flags are set.
+    from keras import backend as K
+    import keras
+    mylogger.logger.info("Using {} as backend for states extraction| {} is wanted".format(K.backend(),bk))
+
+    """Get model hidden output on selected_index data on specific backend"""
+    try:
+        # NOTE(review): backend_input_dict is never used below -- left as-is.
+        backend_input_dict = {}
+        # Per-experiment scratch dir where _get_hidden_output pickles results.
+        localize_output_dir = os.path.join(flags.output_dir,flags.exp,"localize_tmp")
+        x, y = DataUtils.get_data_by_exp(flags.exp)
+        # Mutant models produced by the mutation phase live here.
+        mut_dir = os.path.join(flags.output_dir,flags.exp,"mut_model")
+        _get_hidden_output(test_data=x, backend=bk,select_model=flags.model_idntfr,model_dir=mut_dir,data_index=flags.data_index)
+        mylogger.logger.info("Hidden output extracting done!")
+    except:
+        # Any failure is fatal: print the traceback and signal the parent
+        # process with a non-zero exit code.
+        traceback.print_exc()
+        sys.exit(-1)
+
+

+ 134 - 0
server/LEMON-master/run/patch_prediction_extractor.py

@@ -0,0 +1,134 @@
+# -*-coding:UTF-8-*-
+"""get prediction for each backend
+"""
+import sys
+import os
+sys.path.append("../")
+import numpy as np
+import pickle
+import argparse
+import configparser
+from scripts.tools.utils import DataUtils
+from scripts.logger.lemon_logger import Logger
+import warnings
+
+main_logger = Logger()
+
+
+def custom_objects():
+    """Build the custom-activation mapping needed by keras load_model.
+
+    Returns a dict mapping the activation names used by mutated models
+    ('no_activation', 'leakyrelu') to their implementations.
+    """
+    def no_activation(x):
+        # Identity: pass the tensor through unchanged.
+        return x
+
+    def leakyrelu(x):
+        # Leaky ReLU with a fixed 0.01 negative slope.
+        import keras.backend as K
+        return K.relu(x, alpha=0.01)
+
+    return {'no_activation': no_activation, 'leakyrelu': leakyrelu}
+
+
+def _get_prediction(bk, x, y, model_path,batch_size):
+    """
+    Get prediction of models on different backends.
+
+    Loads the model at ``model_path`` (with the custom activations from
+    custom_objects()), predicts on the first ``flags.test_size`` samples
+    of ``x``, and saves the raw prediction array as ``res.npy`` under the
+    experiment root (the part of ``model_path`` before "origin_model").
+
+    Relies on module globals ``flags``, ``keras``, ``main_logger`` and
+    ``mut_model_name``, all set up in the __main__ section.
+    NOTE(review): ``y``/``test_y`` are accepted but never used.
+    """
+    test_x, test_y = x[:flags.test_size],y[:flags.test_size]
+    predict_model = keras.models.load_model(model_path,custom_objects=custom_objects())
+    # predict_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
+    main_logger.info("INFO:load model and compile done!")
+    res = predict_model.predict(test_x,batch_size=batch_size)
+    root_dir = model_path.split("origin_model")[0]
+    
+    npy_path = root_dir + 'res.npy'  # path where the prediction result is saved; keep line 44 of patch_prediction_extractor.py pointing at the same path
+
+    # test_x: the test set
+    # batch_size: number of samples per prediction batch
+    np.save(npy_path,res)
+    # persist the prediction result to local disk
+    main_logger.info("SUCCESS:Get prediction for {} successfully on {}!".format(mut_model_name,bk))
+    """Store prediction result to redis"""
+#    redis_conn.hset("prediction_{}".format(mut_model_name),bk,pickle.dumps(res))
+
+
+if __name__ == "__main__":
+
+    """Parser of command args"""
+    parse = argparse.ArgumentParser()
+    parse.add_argument("--backend", type=str, help="name of backends")
+    parse.add_argument("--exp", type=str, help="experiments identifiers")
+    parse.add_argument("--test_size", type=int, help="amount of testing image")
+    parse.add_argument("--model", type=str, help="path of the model to predict")
+    #parse.add_argument("--redis_db", type=int)
+    parse.add_argument("--config_name", type=str)
+    flags, unparsed = parse.parse_known_args(sys.argv[1:])
+    
+    """Load Configuration"""
+    warnings.filterwarnings("ignore")
+    lemon_cfg = configparser.ConfigParser()
+    # lemon_cfg.read(f"./config/{flags.config_name}")
+    root_dir = flags.model.split("origin_model")[0]
+    cfg_path = os.path.join(os.path.dirname(os.getcwd()), "config", flags.config_name)
+    lemon_cfg.read(cfg_path)
+    #pool = redis.ConnectionPool(host=lemon_cfg['redis']['host'], port=lemon_cfg['redis']['port'],db=flags.redis_db)
+    #redis_conn = redis.Redis(connection_pool=pool)
+
+    parameters = lemon_cfg['parameters']
+   # gpu_ids = parameters['gpu_ids']
+   # gpu_list = parameters['gpu_ids'].split(",")
+
+    """Init cuda"""
+    #os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+    #os.environ["CUDA_VISIBLE_DEVICES"] = gpu_ids
+    warnings.filterwarnings("ignore")
+
+    batch_size= 32
+    """Switch backend"""
+    # Backends this docker build targets (informational; not validated).
+    bk_list = ['tensorflow', 'mxnet']
+    bk = flags.backend
+    # Keras reads KERAS_BACKEND at import time, so set it before the
+    # per-branch `import keras` statements below.
+    os.environ['KERAS_BACKEND'] = bk
+    os.environ['PYTHONHASHSEED'] = '0'
+    if bk == 'tensorflow':
+        os.environ["TF_CPP_MIN_LOG_LEVEL"] = '2'  # only show warnings and errors
+        import tensorflow as tf
+        main_logger.info(tf.__version__)
+        batch_size = 128
+        import keras
+    if bk == 'theano':
+        # if len(gpu_list) == 2:
+        #     os.environ['THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]};dev{gpu_list[1]}->cuda{gpu_list[1]}," \
+        #                                  f"force_device=True,floatX=float32,lib.cnmem=1"
+        # else:
+        #     os.environ['THEANO_FLAGS'] = f"device=cuda,contexts=dev{gpu_list[0]}->cuda{gpu_list[0]}," \
+        #                                  f"force_device=True,floatX=float32,lib.cnmem=1"
+        import theano as th
+        import keras
+        main_logger.info(th.__version__)
+    if bk == "cntk":
+        #from cntk.device import try_set_default_device,gpu
+        #try_set_default_device(gpu(int(gpu_list[0])))
+        import cntk as ck
+        main_logger.info(ck.__version__)
+        import keras
+
+    if bk == "mxnet":
+        import mxnet as mxnet
+        main_logger.info(f"mxnet_version {mxnet.__version__}")
+        import keras
+
+        # NOTE(review): smallest batch of all backends -- presumably for
+        # memory headroom; confirm before changing.
+        batch_size = 16
+    from keras import backend as K
+
+
+    try:
+        """Get model prediction"""
+        main_logger.info("INFO:Using {} as backend for states extraction| {} is wanted".format(K.backend(),bk))
+        x, y = DataUtils.get_data_by_exp(flags.exp)# load the dataset for this experiment and apply its conversions
+        mut_model_name = os.path.split(flags.model)[-1]
+        print(flags.model)
+        _get_prediction(bk=bk, x=x, y=y, model_path=flags.model,batch_size=batch_size)
+    except Exception:
+        # Fatal: print the traceback and signal failure to the parent process.
+        import traceback
+        traceback.print_exc()
+        sys.exit(-1)

+ 280 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARem1.json

@@ -0,0 +1,280 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "no_activation",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARem5.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "no_activation",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARep14.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "sigmoid",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 317 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARep2.json

@@ -0,0 +1,317 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "tanh",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-ARep8.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "leakyrelu",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF1.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF10.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF11.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF2.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-GF9.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 335 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LA1.json

@@ -0,0 +1,335 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "ThresholdedReLU"
+        ],
+        [
+            "ThresholdedReLU",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "ThresholdedReLU": [
+            {
+                "trainable": true,
+                "batch_input_shape": [
+                    null,
+                    28,
+                    28,
+                    6
+                ],
+                "dtype": "float32",
+                "theta": 1.0
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "ThresholdedReLU": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "ThresholdedReLU",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "ThresholdedReLU": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 298 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LC1.json

@@ -0,0 +1,298 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 298 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LC4.json

@@ -0,0 +1,298 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 298 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-LC6.json

@@ -0,0 +1,298 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 376 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-MLA3.json

@@ -0,0 +1,376 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "ZeroPadding2D"
+        ],
+        [
+            "ZeroPadding2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    3,
+                    3
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "ZeroPadding2D": [
+            {
+                "trainable": true,
+                "padding": [
+                    [
+                        1,
+                        1
+                    ],
+                    [
+                        1,
+                        1
+                    ]
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]",
+                "[Dimension(None), Dimension(7), Dimension(7), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "ZeroPadding2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 12,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "ZeroPadding2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 12,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "ZeroPadding2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NAI1.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NAI2.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NAI9.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NEB3.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-NS4.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1-WS1.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep1.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 280 - 0
server/LEMON-master/run/sub_model/svhn_origin0-ARep10.json

@@ -0,0 +1,280 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "tanh",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 257 - 0
server/LEMON-master/run/sub_model/svhn_origin0-GF4.json

@@ -0,0 +1,257 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 298 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LA2.json

@@ -0,0 +1,298 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Softmax"
+        ],
+        [
+            "Softmax",
+            "Activation"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Softmax": [
+            {
+                "trainable": true,
+                "batch_input_shape": [
+                    null,
+                    10,
+                    10,
+                    16
+                ],
+                "dtype": "float32",
+                "axis": -1
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Softmax": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Softmax",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Softmax": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 303 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LA3.json

@@ -0,0 +1,303 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    3,
+                    3
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "same",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 294 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LA5.json

@@ -0,0 +1,294 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Softmax"
+        ],
+        [
+            "Softmax",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Softmax": [
+            {
+                "trainable": true,
+                "batch_input_shape": [
+                    null,
+                    14,
+                    14,
+                    6
+                ],
+                "dtype": "float32",
+                "axis": -1
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Softmax": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Softmax",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Softmax": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 292 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LA7.json

@@ -0,0 +1,292 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "ThresholdedReLU"
+        ],
+        [
+            "ThresholdedReLU",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "ThresholdedReLU": [
+            {
+                "trainable": true,
+                "batch_input_shape": [
+                    null,
+                    400
+                ],
+                "dtype": "float32",
+                "theta": 1.0
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "ThresholdedReLU": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "ThresholdedReLU",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "ThresholdedReLU": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 261 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LC3.json

@@ -0,0 +1,261 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 261 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LC8.json

@@ -0,0 +1,261 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 11,
+    "layer_type": [
+        "Conv2D",
+        "Activation",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 11,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 297 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-ARem3.json

@@ -0,0 +1,297 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "no_activation",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 297 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-ARep11.json

@@ -0,0 +1,297 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "relu",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 297 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-ARep16.json

@@ -0,0 +1,297 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "leakyrelu",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 297 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-ARem8.json

@@ -0,0 +1,297 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "no_activation",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 260 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-ARep15.json

@@ -0,0 +1,260 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 260 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-GF13.json

@@ -0,0 +1,260 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 260 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-GF14.json

@@ -0,0 +1,260 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 226 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-LR10.json

@@ -0,0 +1,226 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 350 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-MLA9.json

@@ -0,0 +1,350 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "RepeatVector"
+        ],
+        [
+            "RepeatVector",
+            "Reshape"
+        ],
+        [
+            "Reshape",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "linear",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "RepeatVector": [
+            {
+                "trainable": true,
+                "n": 3
+            }
+        ],
+        "Reshape": [
+            {
+                "trainable": true,
+                "target_shape": [
+                    360
+                ]
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(360)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        },
+        "RepeatVector": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(120)]"
+            ]
+        },
+        "Reshape": {
+            "input_dims": [
+                3
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(3), Dimension(120)]"
+            ]
+        }
+    },
+    "layer_num": 12,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense",
+        "RepeatVector",
+        "Reshape"
+    ],
+    "cur_edge_num": 12,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "RepeatVector": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                3
+            ]
+        },
+        "Reshape": {
+            "input_dims": [
+                3
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 260 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12-WS8.json

@@ -0,0 +1,260 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 260 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF12.json

@@ -0,0 +1,260 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 260 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-GF3.json

@@ -0,0 +1,260 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 335 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LA4.json

@@ -0,0 +1,335 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "SeparableConv2D"
+        ],
+        [
+            "SeparableConv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "SeparableConv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    3,
+                    3
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "same",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": true,
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "bias_constraint": "None",
+                "depth_multiplier": 1,
+                "depthwise_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "pointwise_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "depthwise_regularizer": "None",
+                "pointwise_regularizer": "None",
+                "depthwise_constraint": "None",
+                "pointwise_constraint": "None"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "SeparableConv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "SeparableConv2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "SeparableConv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 301 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LA9.json

@@ -0,0 +1,301 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "ReLU"
+        ],
+        [
+            "ReLU",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "ReLU": [
+            {
+                "trainable": true,
+                "batch_input_shape": [
+                    null,
+                    120
+                ],
+                "dtype": "float32",
+                "max_value": "1.0",
+                "negative_slope": "0.0",
+                "threshold": "0.0"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        },
+        "ReLU": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(120)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense",
+        "ReLU"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "ReLU": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 264 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LC5.json

@@ -0,0 +1,264 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 264 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LC7.json

@@ -0,0 +1,264 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 264 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LC9.json

@@ -0,0 +1,264 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "Conv2D",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "Activation"
+        ],
+        [
+            "Activation",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Activation": [
+            {
+                "trainable": true,
+                "activation": "relu"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 10,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Activation",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 10,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Activation": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 263 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARem7.json

@@ -0,0 +1,263 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "no_activation",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep12.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "sigmoid",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep13.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "sigmoid",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 286 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARem2.json

@@ -0,0 +1,286 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "no_activation",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep4.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 286 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep5.json

@@ -0,0 +1,286 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "relu",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 286 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep6.json

@@ -0,0 +1,286 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "tanh",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-ARep7.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-GF15.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-GF7.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 298 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-LA6.json

@@ -0,0 +1,298 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "PReLU"
+        ],
+        [
+            "PReLU",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "PReLU": [
+            {
+                "trainable": true,
+                "batch_input_shape": [
+                    null,
+                    84
+                ],
+                "dtype": "float32",
+                "alpha_initializer": {
+                    "class_name": "RandomNormal",
+                    "config": {
+                        "mean": 0.0,
+                        "stddev": 0.05,
+                        "seed": null
+                    }
+                },
+                "alpha_regularizer": "None",
+                "alpha_constraint": "None",
+                "shared_axes": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        },
+        "PReLU": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 9,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense",
+        "PReLU"
+    ],
+    "cur_edge_num": 9,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "PReLU": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 365 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-MLA7.json

@@ -0,0 +1,365 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Cropping2D"
+        ],
+        [
+            "Cropping2D",
+            "Reshape"
+        ],
+        [
+            "Reshape",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Reshape"
+        ],
+        [
+            "Reshape",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Cropping2D": [
+            {
+                "trainable": true,
+                "cropping": [
+                    [
+                        1,
+                        1
+                    ],
+                    [
+                        1,
+                        1
+                    ]
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Reshape": [
+            {
+                "trainable": true,
+                "target_shape": [
+                    144
+                ]
+            },
+            {
+                "trainable": true,
+                "target_shape": [
+                    5,
+                    5,
+                    16
+                ]
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "linear",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Cropping2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Reshape": {
+            "input_dims": [
+                4,
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(3), Dimension(3), Dimension(16)]",
+                "[Dimension(None), Dimension(400)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(144)]",
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        }
+    },
+    "layer_num": 12,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Cropping2D",
+        "Reshape",
+        "Dense",
+        "Flatten"
+    ],
+    "cur_edge_num": 12,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Cropping2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Reshape": {
+            "input_dims": [
+                4,
+                2
+            ],
+            "output_dims": [
+                2,
+                4
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NAI11.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NAI6.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NEB1.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NEB2.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NS5.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-NS6.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

+ 249 - 0
server/LEMON-master/run/sub_model/svhn_origin0-LR1-LR2-ARep3-WS4.json

@@ -0,0 +1,249 @@
+{
+    "edges": [
+        [
+            "Conv2D",
+            "MaxPooling2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Conv2D"
+        ],
+        [
+            "MaxPooling2D",
+            "Flatten"
+        ],
+        [
+            "Flatten",
+            "Dense"
+        ],
+        [
+            "Dense",
+            "Dense"
+        ]
+    ],
+    "layer_config": {
+        "Conv2D": [
+            {
+                "trainable": true,
+                "kernel_size": [
+                    5,
+                    5
+                ],
+                "strides": [
+                    1,
+                    1
+                ],
+                "padding": "valid",
+                "data_format": "channels_last",
+                "dilation_rate": [
+                    1,
+                    1
+                ],
+                "activation": "linear",
+                "use_bias": false,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ],
+        "MaxPooling2D": [
+            {
+                "trainable": true,
+                "pool_size": [
+                    2,
+                    2
+                ],
+                "padding": "valid",
+                "strides": [
+                    2,
+                    2
+                ],
+                "data_format": "channels_last"
+            }
+        ],
+        "Flatten": [
+            {
+                "trainable": true,
+                "data_format": "channels_last"
+            }
+        ],
+        "Dense": [
+            {
+                "trainable": true,
+                "activation": "leakyrelu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "relu",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            },
+            {
+                "trainable": true,
+                "activation": "softmax",
+                "use_bias": true,
+                "kernel_initializer": {
+                    "class_name": "VarianceScaling",
+                    "config": {
+                        "scale": 1.0,
+                        "mode": "fan_avg",
+                        "distribution": "uniform",
+                        "seed": null
+                    }
+                },
+                "bias_initializer": {
+                    "class_name": "Zeros",
+                    "config": {}
+                },
+                "kernel_regularizer": "None",
+                "bias_regularizer": "None",
+                "activity_regularizer": "None",
+                "kernel_constraint": "None",
+                "bias_constraint": "None"
+            }
+        ]
+    },
+    "layer_input_info": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(32), Dimension(32), Dimension(3)]",
+                "[Dimension(None), Dimension(14), Dimension(14), Dimension(6)]"
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(28), Dimension(28), Dimension(6)]",
+                "[Dimension(None), Dimension(10), Dimension(10), Dimension(16)]"
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(5), Dimension(5), Dimension(16)]"
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "dtype": [
+                "float32"
+            ],
+            "shape": [
+                "[Dimension(None), Dimension(None)]",
+                "[Dimension(None), Dimension(120)]",
+                "[Dimension(None), Dimension(84)]"
+            ]
+        }
+    },
+    "layer_num": 8,
+    "layer_type": [
+        "Conv2D",
+        "MaxPooling2D",
+        "Flatten",
+        "Dense"
+    ],
+    "cur_edge_num": 8,
+    "layer_dims": {
+        "Conv2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "MaxPooling2D": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                4
+            ]
+        },
+        "Flatten": {
+            "input_dims": [
+                4
+            ],
+            "output_dims": [
+                2
+            ]
+        },
+        "Dense": {
+            "input_dims": [
+                2
+            ],
+            "output_dims": [
+                2
+            ]
+        }
+    }
+}

Niektoré súbory nie sú zobrazené, pretože je v týchto rozdielových dátach zmenené mnoho súborov