github.com/apache/beam/sdks/v2@v2.48.2/python/tox.ini (about)

     1  ;
     2  ;    Licensed to the Apache Software Foundation (ASF) under one or more
     3  ;    contributor license agreements.  See the NOTICE file distributed with
     4  ;    this work for additional information regarding copyright ownership.
     5  ;    The ASF licenses this file to You under the Apache License, Version 2.0
     6  ;    (the "License"); you may not use this file except in compliance with
     7  ;    the License.  You may obtain a copy of the License at
     8  ;
     9  ;       http://www.apache.org/licenses/LICENSE-2.0
    10  ;
    11  ;    Unless required by applicable law or agreed to in writing, software
    12  ;    distributed under the License is distributed on an "AS IS" BASIS,
    13  ;    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    14  ;    See the License for the specific language governing permissions and
    15  ;    limitations under the License.
    16  ;
    17  
    18  [tox]
    19  # new environments will be excluded by default unless explicitly added to envlist.
    20  envlist = py37,py38,py39,py310,py311,py37-{cloud,cython,lint,mypy,dask},py38-{cloud,cython,docs,cloudcoverage,dask},py39-{cloud,cython},py310-{cloud,cython,dask},py311-{cloud,cython,dask},whitespacelint
    21  toxworkdir = {toxinidir}/target/{env:ENV_NAME:.tox}
    22  
    23  [pycodestyle]
    24  # Disable all errors and warnings except for the ones related to blank lines.
    25  # pylint does not check the number of blank lines.
    26  select = E3
    27  
    28  # Shared environment options.
    29  [testenv]
    30  # Run the tests using pre-released dependencies.
    31  # https://github.com/apache/beam/issues/25668
    32  pip_pre = True
    33  # allow apps that support color to use it.
    34  passenv=TERM
    35  # Set [] options for pip installation of apache-beam tarball.
    36  extras = test,dataframe
    37  # Don't warn that these commands aren't installed.
    38  allowlist_externals =
    39    false
    40    time
    41    bash
    42    rm
    43  deps =
    44    cython: cython==0.29.33
    45    -r build-requirements.txt
    46  setenv =
    47    RUN_SKIPPED_PY3_TESTS=0
    48    # Use an isolated tmp dir for tests that get slowed down by scanning /tmp.
    49    TMPDIR={envtmpdir}
    50    # Silence warning about ignoring PYTHONPATH.
    51    PYTHONPATH=
    52  
    53  # These 2 magic command overrides are required for Jenkins builds.
    54  # Otherwise we get "OSError: [Errno 2] No such file or directory" errors.
    55  # Source:
    56  # https://github.com/tox-dev/tox/issues/123#issuecomment-284714629
    57  install_command = {envbindir}/python {envbindir}/pip install --retries 10 {opts} {packages}
    58  list_dependencies_command = {envbindir}/python {envbindir}/pip freeze
    59  commands_pre =
    60    python --version
    61    pip --version
    62    pip check
    63    bash {toxinidir}/scripts/run_tox_cleanup.sh
    64  commands_post =
    65    bash {toxinidir}/scripts/run_tox_cleanup.sh
    66  commands = false {envname} is misconfigured
    67  
    68  [testenv:py{37,38,39,310,311}]
    69  commands =
    70    python apache_beam/examples/complete/autocomplete_test.py
    71    bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
    72  
    73  [testenv:py{37,38,39,310,311}-win]
    74  commands =
    75    python apache_beam/examples/complete/autocomplete_test.py
    76    bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
    77  install_command = {envbindir}/python.exe {envbindir}/pip.exe install --retries 10 {opts} {packages}
    78  list_dependencies_command = {envbindir}/python.exe {envbindir}/pip.exe freeze
    79  
    80  [testenv:py{37,38,39,310,311}-cython]
    81  # cython tests are only expected to work in linux (2.x and 3.x)
    82  # If we want to add other platforms in the future, it should be:
    83  # `platform = linux2|darwin|...`
    84  # See https://docs.python.org/2/library/sys.html#sys.platform for platform codes
    85  platform = linux
    86  commands =
    87    # TODO(https://github.com/apache/beam/issues/20051): Remove this build_ext invocation once local source no longer
    88    #   shadows the installed apache_beam.
    89    python setup.py build_ext --inplace
    90    python apache_beam/examples/complete/autocomplete_test.py
    91    bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
    92  
    93  [testenv:py{37,38,39,310,311}-cloud]
    94  extras = test,gcp,interactive,dataframe,aws,azure
    95  commands =
    96    bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
    97  
    98  [testenv:py{37,38,39,310,311}-dask]
    99  extras = test,dask
   100  commands =
   101    bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
   102  [testenv:py38-cloudcoverage]
   103  deps =
   104    pytest-cov==3.0.0
   105  platform = linux
   106  passenv = GIT_* BUILD_* ghprb* CHANGE_ID BRANCH_NAME JENKINS_* CODECOV_*
   107  extras = test,gcp,interactive,dataframe,aws
   108  commands =
   109    -rm .coverage
   110    curl -Os https://uploader.codecov.io/latest/linux/codecov
   111    chmod +x codecov
   112    bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" "--cov-report=xml --cov=. --cov-append"
   113    ./codecov -F python
   114    -rm codecov
   115  
   116  [testenv:py37-lint]
   117  # Don't set TMPDIR to avoid "AF_UNIX path too long" errors in pylint.
   118  setenv =
   119  # keep the version of pylint in sync with the 'rev' in .pre-commit-config.yaml
   120  deps =
   121    -r build-requirements.txt
   122    astroid<2.9,>=2.8.0
   123    pycodestyle==2.8.0
   124    pylint==2.11.1
   125    isort==4.2.15
   126    flake8==4.0.1
   127  commands =
   128    pylint --version
   129    time {toxinidir}/scripts/run_pylint.sh
   130  
   131  [testenv:whitespacelint]
   132  setenv =
   133  deps =
   134    whitespacelint==1.1.0
   135  commands =
   136    time {toxinidir}/scripts/run_whitespacelint.sh
   137  
   138  [testenv:py37-mypy]
   139  deps =
   140    -r build-requirements.txt
   141    mypy==0.790
   142    dask==2022.01.0
   143    distributed==2022.01.0
   144  # make extras available in case any of these libs are typed
   145  extras =
   146    gcp
   147  commands =
   148    mypy --version
   149    python setup.py mypy
   150  
   151  
   152  [testenv:py38-docs]
   153  extras = test,gcp,docs,interactive,dataframe,dask
   154  deps =
   155    Sphinx==1.8.5
   156    sphinx_rtd_theme==0.4.3
   157    docutils<0.18
   158    Jinja2==3.0.3 # TODO(https://github.com/apache/beam/issues/21587): Sphinx version is too old.
   159    torch
   160    xgboost
   161    datatable==1.0.0
   162  commands =
   163    time {toxinidir}/scripts/generate_pydoc.sh
   164  
   165  [testenv:hdfs_integration_test]
   166  # Used by hdfs_integration_test.sh. Do not run this directly, as it depends on
   167  # nodes defined in hdfs_integration_test/docker-compose.yml.
   168  deps =
   169    -r build-requirements.txt
   170    holdup==1.8.0
   171  extras =
   172    gcp
   173  allowlist_externals =
   174    echo
   175    sleep
   176  passenv = HDFSCLI_CONFIG
   177  commands =
   178    holdup -t 45 http://namenode:50070 http://datanode:50075
   179    echo "Waiting for safe mode to end."
   180    sleep 45
   181    wget storage.googleapis.com/dataflow-samples/shakespeare/kinglear.txt
   182    hdfscli -v -v -v upload -f kinglear.txt /
   183    python -m apache_beam.examples.wordcount \
   184        --input hdfs://kinglear* \
   185        --output hdfs://py-wordcount-integration \
   186        --hdfs_host namenode --hdfs_port 50070 --hdfs_user root
   187    python -m apache_beam.examples.wordcount \
   188        --input hdfs://unused_server/kinglear* \
   189        --output hdfs://unused_server/py-wordcount-integration \
   190        --hdfs_host namenode --hdfs_port 50070 --hdfs_user root --hdfs_full_urls
   191  commands_pre =
   192    pip check
   193  
   194  [testenv:azure_integration_test]
   195  # Used by azure/integration_test/azure_integration_test.sh.
   196  # Do not run this directly, as it depends on nodes defined in
   197  # azure/integration_test/docker-compose.yml.
   198  deps =
   199    -r build-requirements.txt
   200  extras =
   201    azure
   202  allowlist_externals =
   203    echo
   204    sleep
   205  passenv = REQUESTS_CA_BUNDLE
   206  setenv =
   207    CONNECTION_STRING=DefaultEndpointsProtocol=https;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=https://azurite:10000/devstoreaccount1;
   208  commands_pre =
   209    pip check
   210    wget storage.googleapis.com/dataflow-samples/shakespeare/kinglear.txt
   211    # Create container for storing files.
   212    az storage container create -n container --connection-string {env:CONNECTION_STRING}
   213    # Upload test file.
   214    az storage blob upload -f kinglear.txt -c container -n kinglear.txt --connection-string {env:CONNECTION_STRING}
   215  commands =
   216    # Test --azure_connection_string
   217    python -m apache_beam.examples.wordcount \
   218        --input azfs://devstoreaccount1/container/* \
   219        --output azfs://devstoreaccount1/container/py-wordcount-integration \
   220        --azure_connection_string {env:CONNECTION_STRING}
   221    # This doesn't work because there's no way to send a fake bearer token to
   222    # Azurite when using DefaultAzureCredential.
   223    # See https://github.com/Azure/Azurite/issues/389#issuecomment-615298432
   224    # and https://github.com/Azure/Azurite/issues/1750#issue-1449778593
   225    #python -m apache_beam.examples.wordcount \
   226    #    --input azfs://devstoreaccount1/container/* \
   227    #    --output azfs://devstoreaccount1/container/py-wordcount-integration \
   228    #    --blob_service_endpoint https://azurite:10000/devstoreaccount1/container-name \
   229    #    --azure_managed_identity_client_id "abc123"
   230  
   231  [testenv:py3-yapf]
   232  # keep the version of yapf in sync with the 'rev' in .pre-commit-config.yaml
   233  deps =
   234    yapf==0.29.0
   235  commands =
   236    yapf --version
   237    time yapf --in-place --parallel --recursive apache_beam
   238  
   239  [testenv:py3-yapf-check]
   240  # keep the version of yapf in sync with the 'rev' in .pre-commit-config.yaml
   241  deps =
   242    yapf==0.29.0
   243  commands =
   244    yapf --version
   245    time yapf --diff --parallel --recursive apache_beam
   246  
   247  [testenv:py3-dependency-check]
   248  # TODO(https://github.com/apache/beam/issues/20337): botocore, a part of [aws], wants docutils<0.16, but Sphinx
   249  # pulls in the latest docutils. Uncomment this line once botocore does not
   250  # conflict with Sphinx:
   251  # extras = docs,test,gcp,aws,interactive,interactive_test
   252  extras = test,gcp,aws,dataframe,interactive,interactive_test
   253  passenv = WORKSPACE
   254  commands =
   255    time {toxinidir}/scripts/run_dependency_check.sh
   256  
   257  [testenv:jest]
   258  setenv =
   259  deps =
   260    jupyterlab==3.1.18
   261  commands =
   262    time {toxinidir}/scripts/setup_nodejs.sh
   263    time {toxinidir}/scripts/run_jest.sh
   264  
   265  [testenv:eslint]
   266  setenv =
   267  deps =
   268    jupyterlab==3.1.18
   269  commands =
   270    time {toxinidir}/scripts/setup_nodejs.sh
   271    time {toxinidir}/scripts/run_eslint.sh
   272  
   273  [testenv:flink-runner-test]
   274  extras = test
   275  commands =
   276    bash {toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/flink_runner_test.py {posargs}
   277  
   278  [testenv:samza-runner-test]
   279  extras = test
   280  commands =
   281    bash {toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/samza_runner_test.py {posargs}
   282  
   283  [testenv:spark-runner-test]
   284  extras = test
   285  commands =
   286    bash {toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/spark_runner_test.py {posargs}
   287  
   288  [testenv:py{37,38,39,310}-pyarrow-{3,4,5,6,7,8,9}]
   289  deps =
   290    3: pyarrow>=3,<4
   291    4: pyarrow>=4,<5
   292    5: pyarrow>=5,<6
   293    6: pyarrow>=6,<7
   294    7: pyarrow>=7,<8
   295    8: pyarrow>=8,<9
   296    9: pyarrow>=9,<10
   297  commands =
   298    # Log pyarrow and numpy version for debugging
   299    /bin/sh -c "pip freeze | grep -E '(pyarrow|numpy)'"
   300    # Run pytest directly rather than using run_pytest.sh. It doesn't handle
   301    # selecting tests with -m (BEAM-12985).
   302    # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories.
   303    /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pyarrow {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret'
   304  
   305  [testenv:py{37,38,39,310,311}-pyarrow-{10,11}]
   306  deps =
   307    10: pyarrow>=10,<11
   308    11: pyarrow>=11,<12
   309  commands =
   310    # Log pyarrow and numpy version for debugging
   311    /bin/sh -c "pip freeze | grep -E '(pyarrow|numpy)'"
   312    # Run pytest directly rather than using run_pytest.sh. It doesn't handle
   313    # selecting tests with -m (BEAM-12985).
   314    # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories.
   315    /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pyarrow {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret'
   316  
   317  
   318  [testenv:py{37,38,39,310,311}-pandas-{14,15}]
   319  deps =
   320    14: pandas>=1.4.3,<1.5.0
   321    # Exclude 1.5.0 and 1.5.1 because of https://github.com/pandas-dev/pandas/issues/45725
   322    15: pandas>=1.5.2,<1.6.0
   323  commands =
   324    # Log pandas and numpy version for debugging
   325    /bin/sh -c "pip freeze | grep -E '(pandas|numpy)'"
   326    # Run all DataFrame API unit tests
   327    bash {toxinidir}/scripts/run_pytest.sh {envname} 'apache_beam/dataframe'
   328  
   329  [testenv:py{37,38,39,310,311}-pytorch-{19,110,111,112,113}]
   330  deps =
   331    -r build-requirements.txt
   332    19: torch>=1.9.0,<1.10.0
   333    110: torch>=1.10.0,<1.11.0
   334    111: torch>=1.11.0,<1.12.0
   335    112: torch>=1.12.0,<1.13.0
   336    113: torch>=1.13.0,<1.14.0
   337  extras = test,gcp
   338  commands =
   339    # Log torch version for debugging
   340    /bin/sh -c "pip freeze | grep -E torch"
   341    # Run all PyTorch<2 unit tests
   342    # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories.
   343    /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pytorch {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret'
   344  
   345  [testenv:py{38,39,310}-pytorch-200]
   346  deps =
   347    -r build-requirements.txt
   348    200: torch>=2.0.0,<2.1.0
   349  extras = test,gcp
   350  commands =
   351    # Log torch version for debugging
   352    /bin/sh -c "pip freeze | grep -E torch"
   353    # Run all PyTorch>=2 unit tests
   354    # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories.
   355    /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pytorch {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret'
   356  
   357  # TODO(https://github.com/apache/beam/issues/25796) - uncomment onnx tox task in tox/py38/build.gradle once onnx supports protobuf 4.x.x
   358  [testenv:py{37,38,39,310}-onnx-113]
   359  # TODO(https://github.com/apache/beam/issues/25443)
   360  # apparently tox has a problem when a substitution key has a single value. Change back to -onnx-{113,...}
   361  # when multiple onnx versions are tested.
   362  deps =
   363    onnxruntime==1.13.1
   364    pandas==1.5.2
   365    torch==1.13.1
   366    tensorflow==2.11.0
   367    tf2onnx==1.13.0
   368    skl2onnx==1.13
   369    transformers==4.25.1
   370  extras = test,gcp
   371  commands =
   372    # Log onnx version for debugging
   373    /bin/sh -c "pip freeze | grep -E onnx"
   374    # Run all ONNX unit tests
   375    pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_onnx {posargs}
   376  
   377  [testenv:py{38,39,310}-tensorflow-212]
   378  deps =
   379    -r build-requirements.txt
   380    212: tensorflow>=2.12rc1,<2.13
   381  extras = test,gcp
   382  commands =
   383    # Log tensorflow version for debugging
   384    /bin/sh -c "pip freeze | grep -E tensorflow"
   385    # Run all Tensorflow unit tests
   386    # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories.
   387    /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_tf {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret'
   388  
   389  [testenv:py{37,38,39,310}-xgboost-{160,170}]
   390  deps =
   391    -r build-requirements.txt
   392    160:
   393      xgboost>=1.6.0,<1.7.0
   394      datatable==1.0.0
   395    170:
   396      xgboost>=1.7.0
   397      datatable==1.0.0
   398  extras = test,gcp
   399  commands =
   400    # Log XGBoost version for debugging
   401    /bin/sh -c "pip freeze | grep -E xgboost"
   402    # Run all XGBoost unit tests
   403    # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories.
   404    /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_xgboost {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret'