├── BUILD.bazel ├── tests ├── __init__.py ├── test_atds_avro │ ├── __init__.py │ └── utils │ │ ├── __init__.py │ │ ├── generator │ │ ├── __init__.py │ │ └── sparse_util.py │ │ └── hash_util.py ├── test_bigtable │ └── __init__.py ├── test_csv │ └── null.csv ├── test_avro │ ├── test.bin │ ├── weather.avro │ ├── cpx.json │ └── weather.avsc ├── test_hdf5 │ ├── tdset.h5 │ ├── tbinary.h5 │ ├── compressed_h5.h5 │ └── h5ex_g_traverse.h5 ├── test_image │ ├── img.jp2 │ ├── cradle.gif │ ├── lena.bmp │ ├── sample.png │ ├── small.tiff │ ├── glacier.exr │ ├── glacier.hdr │ ├── sample.webp │ ├── small-00.png │ ├── small-01.png │ ├── small-02.png │ ├── small-03.png │ ├── small-04.png │ ├── small-bb.png │ ├── Jelly-Beans.jp2 │ ├── fallout-4.jpg │ ├── Jelly-Beans.nv12 │ ├── Jelly-Beans.tiff │ ├── Jelly-Beans.yuy2 │ ├── down-mirrored.jpg │ ├── Jelly-Beans.jp2.png │ ├── Jelly-Beans.nv12.png │ ├── Jelly-Beans.yuy2.png │ ├── GeogToWGS84GeoKey5.png │ ├── GeogToWGS84GeoKey5.tif │ ├── GeogToWGS84GeoKey5.txt │ ├── kodim03_yuv420_8bpc.avif │ ├── kodim03_yuv420_8bpc.png │ ├── multipage_tiff_example.tif │ ├── d-1316653631.269651-68451027.pgm │ ├── d-1316653631.269651-68451027.png │ ├── r-1316653631.481244-81973200.png │ ├── r-1316653631.481244-81973200.ppm │ ├── IXMtest_A01_s1_w164FBEEF7-F77C-4892-86F5-72D0160D4FB2.tif │ └── GeogToWGS84GeoKey5.lgo ├── test_lmdb │ └── data.mdb ├── test_orc │ └── iris.orc ├── test_audio │ ├── l1-fl6.bit │ ├── l1-fl6.pcm │ ├── l1-fl6.raw │ ├── mono_10khz.wav │ ├── gs-16b-2c-44100hz.mp4 │ ├── gs-16b-2c-44100hz.wav │ ├── ZASFX_ADSR_no_sustain.ogg │ ├── ZASFX_ADSR_no_sustain.wav │ ├── ZASFX_ADSR_no_sustain.flac │ ├── ZASFX_ADSR_no_sustain.f32.wav │ ├── ZASFX_ADSR_no_sustain.s24.flac │ ├── ZASFX_ADSR_no_sustain.s24.wav │ ├── ZASFX_ADSR_no_sustain.u8.flac │ ├── ZASFX_ADSR_no_sustain.u8.wav │ ├── gs-16b-2c-44100hz.encoded.wav │ ├── gs-16b-2c-44100hz.ffmpeg.wav │ ├── ZASFX_ADSR_no_sustain.s16le.pcm │ ├── gs-16b-2c-44100hz.encoded.ffmpeg.wav │ └── 
ZASFX_ADSR_no_sustain-4410-quality-default.wav ├── test_libsvm │ └── sample ├── test_mnist │ ├── mnist.npz │ ├── t10k-images-idx3-ubyte │ ├── t10k-images-idx3-ubyte.gz │ └── t10k-labels-idx1-ubyte.gz ├── test_pcap │ └── http.pcap ├── test_video │ └── small.mp4 ├── test_text │ ├── lorem.txt.gz │ ├── attack-trace.pcap │ ├── sample2.csv │ ├── sample.csv │ ├── sample1.csv │ └── stdin_test.py ├── test_dicom │ ├── OT-MONO2-8-a7.dcm │ ├── OT-PAL-8-face.dcm │ ├── CT-MONO2-16-ort.dcm │ ├── CT-MONO2-8-abdo.dcm │ ├── MR-MONO2-12-an2.dcm │ ├── MR-MONO2-16-head.dcm │ ├── MR-MONO2-16-knee.dcm │ ├── OT-MONO2-8-colon.dcm │ ├── OT-MONO2-8-hip.dcm │ ├── US-RGB-8-epicard.dcm │ ├── CR-MONO1-10-chest.dcm │ ├── CT-MONO2-16-ankle.dcm │ ├── CT-MONO2-16-brain.dcm │ ├── CT-MONO2-16-chest.dcm │ ├── US-PAL-8-10x-echo.dcm │ ├── US-RGB-8-esopecho.dcm │ ├── MR-MONO2-12-shoulder.dcm │ ├── MR-MONO2-8-16x-heart.dcm │ ├── NM-MONO2-16-13x-heart.dcm │ ├── US-MONO2-8-8x-execho.dcm │ ├── XA-MONO2-8-12x-catheter.dcm │ ├── TOSHIBA_J2K_OpenJPEGv2Regression.dcm │ ├── 2.25.304589190180579357564631626197663875025.dcm │ └── dicom_samples.sh ├── test_json │ ├── label.ndjson │ ├── feature.ndjson │ ├── feature.json │ └── label.json ├── test_gcloud │ ├── testbench │ │ ├── requirements.txt │ │ ├── README.md │ │ └── error_response.py │ └── test_gcs.sh ├── test_parquet │ ├── parquet_cpp_example.parquet │ ├── parquet_cpp_example.parquet.gz │ ├── parquet_cpp_example.parquet.tar.gz │ ├── part-00000-ca0e89bf-ccd7-47e1-925c-9b42c8716c84-c000.snappy.parquet │ └── parquet_cpp_example.patch ├── test_obj │ └── sample.obj ├── test_azure │ └── start_azure.sh ├── test_sql │ └── run.sql ├── test_genome │ └── test.fastq ├── conftest.py ├── test_aws │ └── aws_test.sh ├── test_version.py ├── test_filesystem.py ├── test_obj.py ├── test_hdfs │ └── hdfs_test.sh └── test_mongodb │ └── mongodb_test.sh ├── .bazelversion ├── tensorflow_io ├── core │ ├── __init__.py │ ├── grpc │ │ ├── endpoint.proto │ │ ├── __init__.py │ │ └── BUILD │ ├── 
filesystems │ │ ├── oss │ │ │ └── BUILD │ │ ├── az │ │ │ └── BUILD │ │ ├── hdfs │ │ │ └── BUILD │ │ ├── http │ │ │ └── BUILD │ │ ├── s3 │ │ │ └── BUILD │ │ └── BUILD │ ├── ops │ │ ├── bigquery_test_ops.cc │ │ ├── filesystem_ops.cc │ │ ├── genome_ops.cc │ │ └── obj_ops.cc │ └── kernels │ │ ├── gsmemcachedfs │ │ ├── gce_memcached_server_list_provider.h │ │ └── gs_memcached_file_system.cc │ │ └── avro │ │ └── utils │ │ ├── BUILD │ │ ├── parse_avro_attrs.h │ │ └── parse_avro_attrs.cc ├── python │ ├── __init__.py │ ├── experimental │ │ ├── __init__.py │ │ ├── atds │ │ │ └── __init__.py │ │ ├── ffmpeg_ops.py │ │ ├── filesystem_ops.py │ │ ├── file_dataset_ops.py │ │ ├── varlen_feature_with_rank.py │ │ └── io_tensor.py │ ├── ops │ │ ├── bigtable │ │ │ └── __init__.py │ │ ├── version_ops.py │ │ ├── golang_ops.py │ │ └── archive_ops.py │ ├── utils │ │ └── __init__.py │ └── api │ │ ├── version.py │ │ ├── experimental │ │ ├── ffmpeg.py │ │ ├── filesystem.py │ │ ├── elasticsearch.py │ │ ├── filter.py │ │ ├── text.py │ │ ├── mongodb.py │ │ ├── serialization.py │ │ ├── image.py │ │ ├── color.py │ │ ├── streaming.py │ │ └── columnar.py │ │ ├── image.py │ │ ├── genome.py │ │ ├── bigtable.py │ │ ├── audio.py │ │ └── __init__.py ├── __init__.py ├── kafka.py ├── bigquery.py └── arrow.py ├── third_party ├── toolchains │ ├── gpu │ │ ├── BUILD │ │ ├── cuda │ │ │ ├── BUILD │ │ │ └── cuda_config.h.tpl │ │ └── crosstool │ │ │ └── BUILD │ ├── tf │ │ └── BUILD │ ├── gcc7_manylinux2010 │ │ ├── tools │ │ │ └── cpp │ │ │ │ └── empty.cc │ │ ├── WORKSPACE │ │ ├── cc_wrapper.sh │ │ └── dummy_toolchain.bzl │ └── gcc10_manylinux2014_aarch64 │ │ └── WORKSPACE ├── minimp4.BUILD ├── minimp3.BUILD ├── rapidjson.BUILD ├── tinyobjloader.BUILD ├── stb.BUILD ├── easyexif.BUILD ├── dlfcn-win32.BUILD ├── hadoop.BUILD ├── bzip2.BUILD ├── aws-c-event-stream.BUILD ├── libyuv.BUILD ├── brotli.BUILD ├── libexpat.BUILD ├── lmdb.BUILD ├── avro.BUILD ├── BUILD ├── fmjpeg2koj.BUILD ├── lz4.BUILD ├── expand_template.bzl 
├── mxml.BUILD ├── zlib.BUILD ├── vorbis.BUILD ├── libavif.BUILD ├── xsimd.BUILD ├── libwebp.BUILD ├── zstd.BUILD ├── snappy.BUILD ├── libgav1.BUILD ├── ogg.BUILD ├── aws-c-common.BUILD ├── boost.BUILD ├── libgeotiff.BUILD ├── uuid.BUILD └── oss_c_sdk.BUILD ├── tensorflow_io_gcs_filesystem ├── core │ ├── __init__.py │ ├── python │ │ └── __init__.py │ ├── file_system_plugin_gs.h │ ├── gcs_helper.h │ └── BUILD └── __init__.py ├── docs ├── tutorials │ ├── prometheus │ │ ├── Corefile │ │ └── prometheus.yml │ ├── avro │ │ ├── train.avro │ │ └── train.avsc │ ├── dicom │ │ └── dicom_00000001_000.dcm │ └── _toc.yaml ├── images │ └── vscode_debugger.png ├── _toc.yaml └── overview.md ├── R-package ├── .gitignore ├── docs │ ├── pkgdown.yml │ ├── extra.js │ ├── articles │ │ └── introduction_files │ │ │ └── elevate-section-attrs-2.0 │ │ │ └── elevate-section-attrs.js │ ├── link.svg │ └── extra.css ├── tests │ ├── testthat.R │ └── testthat │ │ ├── testdata │ │ ├── data.mdb │ │ ├── small.mp4 │ │ ├── small.tiff │ │ ├── string.seq │ │ ├── sample.webp │ │ └── parquet_cpp_example.parquet │ │ └── utils.R ├── README.md ├── .Rbuildignore ├── R │ ├── core_wrappers.R │ └── dataset_utils.R ├── man │ ├── pipe.Rd │ ├── tfio.Rd │ ├── from_schema.Rd │ ├── mnist_image_dataset.Rd │ ├── mnist_label_dataset.Rd │ ├── pubsub_dataset.Rd │ ├── tiff_dataset.Rd │ ├── webp_dataset.Rd │ ├── lmdb_dataset.Rd │ ├── video_dataset.Rd │ ├── sequence_file_dataset.Rd │ ├── kinesis_dataset.Rd │ ├── from_schema.arrow_stream_dataset.Rd │ ├── from_schema.arrow_feather_dataset.Rd │ ├── kafka_dataset.Rd │ ├── parquet_dataset.Rd │ ├── arrow_stream_dataset.Rd │ └── arrow_feather_dataset.Rd ├── cran-comments.md ├── tfio.Rproj ├── pkgdown │ ├── extra.js │ ├── _pkgdown.yml │ └── extra.css ├── scripts │ └── Dockerfile ├── index.Rmd └── DESCRIPTION ├── tools ├── lint │ ├── requirements.txt │ ├── black_python.py │ ├── pyupgrade_python.py │ └── BUILD ├── build │ ├── auditwheel │ ├── swift │ │ └── BUILD │ └── tensorflow_io.bzl 
└── docker │ ├── build.Dockerfile │ ├── README.md │ ├── cpu.Dockerfile │ ├── nightly.Dockerfile │ ├── tests │ ├── dockerfile_cpu_test.sh │ ├── dockerfile_nightly_test.sh │ ├── dockerfile_devel_test.sh │ └── bazel_build.sh │ └── devel.Dockerfile ├── .gitattributes ├── .github ├── workflows │ ├── build.space.sh │ ├── build.instruction.py │ └── release.note.yml └── dependabot.yml ├── SECURITY.md ├── .gitignore ├── configure.sh ├── CONTRIBUTING.md └── .kokorun └── io_gpu.sh /BUILD.bazel: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.bazelversion: -------------------------------------------------------------------------------- 1 | 6.5.0 2 | -------------------------------------------------------------------------------- /tensorflow_io/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_io/python/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_atds_avro/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_bigtable/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/toolchains/gpu/BUILD: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /third_party/toolchains/tf/BUILD: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_atds_avro/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/toolchains/gpu/cuda/BUILD: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/toolchains/gpu/crosstool/BUILD: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_io/python/experimental/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_io/python/ops/bigtable/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_io_gcs_filesystem/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_atds_avro/utils/generator/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_io/python/experimental/atds/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/tensorflow_io_gcs_filesystem/core/python/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_csv/null.csv: -------------------------------------------------------------------------------- 1 | C1,C2,C3 2 | 1,2,3 3 | 4,NaN,6 4 | 7,8,9 5 | -------------------------------------------------------------------------------- /third_party/toolchains/gcc7_manylinux2010/tools/cpp/empty.cc: -------------------------------------------------------------------------------- 1 | int main() {} 2 | -------------------------------------------------------------------------------- /docs/tutorials/prometheus/Corefile: -------------------------------------------------------------------------------- 1 | .:9053 { 2 | prometheus 3 | whoami 4 | } 5 | -------------------------------------------------------------------------------- /R-package/.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .Ruserdata 5 | .DS_Store 6 | -------------------------------------------------------------------------------- /tests/test_avro/test.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_avro/test.bin -------------------------------------------------------------------------------- /tests/test_hdf5/tdset.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_hdf5/tdset.h5 -------------------------------------------------------------------------------- /tests/test_image/img.jp2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/img.jp2 
-------------------------------------------------------------------------------- /tests/test_lmdb/data.mdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_lmdb/data.mdb -------------------------------------------------------------------------------- /tests/test_orc/iris.orc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_orc/iris.orc -------------------------------------------------------------------------------- /tests/test_audio/l1-fl6.bit: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/l1-fl6.bit -------------------------------------------------------------------------------- /tests/test_audio/l1-fl6.pcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/l1-fl6.pcm -------------------------------------------------------------------------------- /tests/test_audio/l1-fl6.raw: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/l1-fl6.raw -------------------------------------------------------------------------------- /tests/test_hdf5/tbinary.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_hdf5/tbinary.h5 -------------------------------------------------------------------------------- /tests/test_image/cradle.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/cradle.gif -------------------------------------------------------------------------------- 
/tests/test_image/lena.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/lena.bmp -------------------------------------------------------------------------------- /tests/test_image/sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/sample.png -------------------------------------------------------------------------------- /tests/test_image/small.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small.tiff -------------------------------------------------------------------------------- /tests/test_libsvm/sample: -------------------------------------------------------------------------------- 1 | 1 1:3.4 2:0.5 4:0.231 2 | 1 2:2.5 3:inf 5:0.503 3 | 2 3:2.5 2:nan 1:0.105 4 | -------------------------------------------------------------------------------- /tests/test_mnist/mnist.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_mnist/mnist.npz -------------------------------------------------------------------------------- /tests/test_pcap/http.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_pcap/http.pcap -------------------------------------------------------------------------------- /tests/test_video/small.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_video/small.mp4 -------------------------------------------------------------------------------- /R-package/docs/pkgdown.yml: 
-------------------------------------------------------------------------------- 1 | pandoc: '2.9' 2 | pkgdown: 1.4.1 3 | pkgdown_sha: ~ 4 | articles: [] 5 | 6 | -------------------------------------------------------------------------------- /tests/test_avro/weather.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_avro/weather.avro -------------------------------------------------------------------------------- /tests/test_image/glacier.exr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/glacier.exr -------------------------------------------------------------------------------- /tests/test_image/glacier.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/glacier.hdr -------------------------------------------------------------------------------- /tests/test_image/sample.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/sample.webp -------------------------------------------------------------------------------- /tests/test_image/small-00.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small-00.png -------------------------------------------------------------------------------- /tests/test_image/small-01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small-01.png -------------------------------------------------------------------------------- /tests/test_image/small-02.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small-02.png -------------------------------------------------------------------------------- /tests/test_image/small-03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small-03.png -------------------------------------------------------------------------------- /tests/test_image/small-04.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small-04.png -------------------------------------------------------------------------------- /tests/test_image/small-bb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/small-bb.png -------------------------------------------------------------------------------- /tests/test_text/lorem.txt.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_text/lorem.txt.gz -------------------------------------------------------------------------------- /docs/images/vscode_debugger.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/docs/images/vscode_debugger.png -------------------------------------------------------------------------------- /docs/tutorials/avro/train.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/docs/tutorials/avro/train.avro -------------------------------------------------------------------------------- /tests/test_audio/mono_10khz.wav: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/mono_10khz.wav -------------------------------------------------------------------------------- /tests/test_hdf5/compressed_h5.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_hdf5/compressed_h5.h5 -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.jp2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.jp2 -------------------------------------------------------------------------------- /tests/test_image/fallout-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/fallout-4.jpg -------------------------------------------------------------------------------- /R-package/tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(tensorflow) 3 | library(tfio) 4 | 5 | test_check("tfio") 6 | -------------------------------------------------------------------------------- /tests/test_dicom/OT-MONO2-8-a7.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/OT-MONO2-8-a7.dcm -------------------------------------------------------------------------------- /tests/test_dicom/OT-PAL-8-face.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/OT-PAL-8-face.dcm -------------------------------------------------------------------------------- /tests/test_hdf5/h5ex_g_traverse.h5: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_hdf5/h5ex_g_traverse.h5 -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.nv12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.nv12 -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.tiff -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.yuy2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.yuy2 -------------------------------------------------------------------------------- /tests/test_image/down-mirrored.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/down-mirrored.jpg -------------------------------------------------------------------------------- /tests/test_json/label.ndjson: -------------------------------------------------------------------------------- 1 | { "floatlabel": 2.2, "integerlabel": 3 } 2 | { "floatlabel": 1.2, "integerlabel": 3 } 3 | -------------------------------------------------------------------------------- /tests/test_text/attack-trace.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_text/attack-trace.pcap -------------------------------------------------------------------------------- 
/tests/test_dicom/CT-MONO2-16-ort.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/CT-MONO2-16-ort.dcm -------------------------------------------------------------------------------- /tests/test_dicom/CT-MONO2-8-abdo.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/CT-MONO2-8-abdo.dcm -------------------------------------------------------------------------------- /tests/test_dicom/MR-MONO2-12-an2.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/MR-MONO2-12-an2.dcm -------------------------------------------------------------------------------- /tests/test_dicom/MR-MONO2-16-head.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/MR-MONO2-16-head.dcm -------------------------------------------------------------------------------- /tests/test_dicom/MR-MONO2-16-knee.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/MR-MONO2-16-knee.dcm -------------------------------------------------------------------------------- /tests/test_dicom/OT-MONO2-8-colon.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/OT-MONO2-8-colon.dcm -------------------------------------------------------------------------------- /tests/test_dicom/OT-MONO2-8-hip.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/OT-MONO2-8-hip.dcm 
-------------------------------------------------------------------------------- /tests/test_dicom/US-RGB-8-epicard.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/US-RGB-8-epicard.dcm -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.jp2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.jp2.png -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.nv12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.nv12.png -------------------------------------------------------------------------------- /tests/test_image/Jelly-Beans.yuy2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/Jelly-Beans.yuy2.png -------------------------------------------------------------------------------- /tests/test_text/sample2.csv: -------------------------------------------------------------------------------- 1 | 15,10000000000,3.0,4.0e30,"col 5 string 1" 2 | 30,20000000000,6.0,8.0e30,"col 5 string 2" 3 | -------------------------------------------------------------------------------- /tests/test_audio/gs-16b-2c-44100hz.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/gs-16b-2c-44100hz.mp4 -------------------------------------------------------------------------------- /tests/test_audio/gs-16b-2c-44100hz.wav: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/gs-16b-2c-44100hz.wav -------------------------------------------------------------------------------- /tests/test_dicom/CR-MONO1-10-chest.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/CR-MONO1-10-chest.dcm -------------------------------------------------------------------------------- /tests/test_dicom/CT-MONO2-16-ankle.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/CT-MONO2-16-ankle.dcm -------------------------------------------------------------------------------- /tests/test_dicom/CT-MONO2-16-brain.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/CT-MONO2-16-brain.dcm -------------------------------------------------------------------------------- /tests/test_dicom/CT-MONO2-16-chest.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/CT-MONO2-16-chest.dcm -------------------------------------------------------------------------------- /tests/test_dicom/US-PAL-8-10x-echo.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/US-PAL-8-10x-echo.dcm -------------------------------------------------------------------------------- /tests/test_dicom/US-RGB-8-esopecho.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/US-RGB-8-esopecho.dcm -------------------------------------------------------------------------------- /tests/test_image/GeogToWGS84GeoKey5.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/GeogToWGS84GeoKey5.png -------------------------------------------------------------------------------- /tests/test_image/GeogToWGS84GeoKey5.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/GeogToWGS84GeoKey5.tif -------------------------------------------------------------------------------- /tests/test_image/GeogToWGS84GeoKey5.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/GeogToWGS84GeoKey5.txt -------------------------------------------------------------------------------- /tests/test_json/feature.ndjson: -------------------------------------------------------------------------------- 1 | { "floatfeature": 1.1, "integerfeature": 2 } 2 | { "floatfeature": 2.1, "integerfeature": 3 } 3 | -------------------------------------------------------------------------------- /tests/test_mnist/t10k-images-idx3-ubyte: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_mnist/t10k-images-idx3-ubyte -------------------------------------------------------------------------------- /tests/test_text/sample.csv: -------------------------------------------------------------------------------- 1 | 1,2,3 2 | 4,5,6 3 | 7,8,9 4 | 1,2,3 5 | 4,5,6 6 | 7,8,9 7 | 1,2,3 8 | 4,5,6 9 | 7,8,9 10 | -------------------------------------------------------------------------------- /R-package/tests/testthat/testdata/data.mdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/R-package/tests/testthat/testdata/data.mdb 
-------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.ogg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.ogg -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.wav -------------------------------------------------------------------------------- /tests/test_dicom/MR-MONO2-12-shoulder.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/MR-MONO2-12-shoulder.dcm -------------------------------------------------------------------------------- /tests/test_dicom/MR-MONO2-8-16x-heart.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/MR-MONO2-8-16x-heart.dcm -------------------------------------------------------------------------------- /tests/test_dicom/NM-MONO2-16-13x-heart.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/NM-MONO2-16-13x-heart.dcm -------------------------------------------------------------------------------- /tests/test_dicom/US-MONO2-8-8x-execho.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/US-MONO2-8-8x-execho.dcm -------------------------------------------------------------------------------- /tests/test_image/kodim03_yuv420_8bpc.avif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/kodim03_yuv420_8bpc.avif -------------------------------------------------------------------------------- /tests/test_image/kodim03_yuv420_8bpc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/kodim03_yuv420_8bpc.png -------------------------------------------------------------------------------- /tests/test_mnist/t10k-images-idx3-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_mnist/t10k-images-idx3-ubyte.gz -------------------------------------------------------------------------------- /tests/test_mnist/t10k-labels-idx1-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_mnist/t10k-labels-idx1-ubyte.gz -------------------------------------------------------------------------------- /R-package/tests/testthat/testdata/small.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/R-package/tests/testthat/testdata/small.mp4 -------------------------------------------------------------------------------- /R-package/tests/testthat/testdata/small.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/R-package/tests/testthat/testdata/small.tiff -------------------------------------------------------------------------------- /R-package/tests/testthat/testdata/string.seq: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/R-package/tests/testthat/testdata/string.seq 
-------------------------------------------------------------------------------- /docs/tutorials/dicom/dicom_00000001_000.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/docs/tutorials/dicom/dicom_00000001_000.dcm -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.flac: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.flac -------------------------------------------------------------------------------- /tests/test_dicom/XA-MONO2-8-12x-catheter.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/XA-MONO2-8-12x-catheter.dcm -------------------------------------------------------------------------------- /tests/test_gcloud/testbench/requirements.txt: -------------------------------------------------------------------------------- 1 | crc32c==2.1 2 | flask==2.3.2 3 | greenlet==0.4.17 4 | gevent==23.9.1 5 | gunicorn==23.0.0 6 | -------------------------------------------------------------------------------- /tests/test_image/multipage_tiff_example.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/multipage_tiff_example.tif -------------------------------------------------------------------------------- /R-package/tests/testthat/testdata/sample.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/R-package/tests/testthat/testdata/sample.webp -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.f32.wav: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.f32.wav -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.s24.flac: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.s24.flac -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.s24.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.s24.wav -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.u8.flac: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.u8.flac -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.u8.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.u8.wav -------------------------------------------------------------------------------- /tests/test_audio/gs-16b-2c-44100hz.encoded.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/gs-16b-2c-44100hz.encoded.wav -------------------------------------------------------------------------------- /tests/test_audio/gs-16b-2c-44100hz.ffmpeg.wav: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/gs-16b-2c-44100hz.ffmpeg.wav -------------------------------------------------------------------------------- /tests/test_parquet/parquet_cpp_example.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_parquet/parquet_cpp_example.parquet -------------------------------------------------------------------------------- /R-package/README.md: -------------------------------------------------------------------------------- 1 | ## R interface to TensorFlow IO 2 | 3 | This is the R interface to datasets and filesystem extensions maintained by SIG-IO. 4 | -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain.s16le.pcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain.s16le.pcm -------------------------------------------------------------------------------- /tests/test_image/d-1316653631.269651-68451027.pgm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/d-1316653631.269651-68451027.pgm -------------------------------------------------------------------------------- /tests/test_image/d-1316653631.269651-68451027.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/d-1316653631.269651-68451027.png -------------------------------------------------------------------------------- /tests/test_image/r-1316653631.481244-81973200.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/r-1316653631.481244-81973200.png 
-------------------------------------------------------------------------------- /tests/test_image/r-1316653631.481244-81973200.ppm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/r-1316653631.481244-81973200.ppm -------------------------------------------------------------------------------- /tests/test_parquet/parquet_cpp_example.parquet.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_parquet/parquet_cpp_example.parquet.gz -------------------------------------------------------------------------------- /R-package/.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | ^man-roxygen/ 4 | scripts 5 | ^cran-comments\.md$ 6 | ^docs/ 7 | ^index.*$ 8 | ^pkgdown$ 9 | -------------------------------------------------------------------------------- /tests/test_audio/gs-16b-2c-44100hz.encoded.ffmpeg.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/gs-16b-2c-44100hz.encoded.ffmpeg.wav -------------------------------------------------------------------------------- /tests/test_dicom/TOSHIBA_J2K_OpenJPEGv2Regression.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/TOSHIBA_J2K_OpenJPEGv2Regression.dcm -------------------------------------------------------------------------------- /tests/test_parquet/parquet_cpp_example.parquet.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_parquet/parquet_cpp_example.parquet.tar.gz 
-------------------------------------------------------------------------------- /tests/test_text/sample1.csv: -------------------------------------------------------------------------------- 1 | "col1",col2,"col 3","col 4",col5 2 | 15,10000000000,3.0,4.0e30,"col 5 string 1" 3 | 30,20000000000,6.0,8.0e30,"col 5 string 2" 4 | -------------------------------------------------------------------------------- /tools/lint/requirements.txt: -------------------------------------------------------------------------------- 1 | black==24.3.0;python_version>="3.6" and sys.platform!="win32" 2 | pyupgrade==2.29.0;python_version>="3.6" and sys.platform!="win32" 3 | -------------------------------------------------------------------------------- /R-package/tests/testthat/testdata/parquet_cpp_example.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/R-package/tests/testthat/testdata/parquet_cpp_example.parquet -------------------------------------------------------------------------------- /third_party/toolchains/gcc7_manylinux2010/WORKSPACE: -------------------------------------------------------------------------------- 1 | # DO NOT EDIT: automatically generated WORKSPACE file for cc_autoconf rule 2 | workspace(name = "local_config_cc") 3 | -------------------------------------------------------------------------------- /tests/test_audio/ZASFX_ADSR_no_sustain-4410-quality-default.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_audio/ZASFX_ADSR_no_sustain-4410-quality-default.wav -------------------------------------------------------------------------------- /tests/test_dicom/2.25.304589190180579357564631626197663875025.dcm: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_dicom/2.25.304589190180579357564631626197663875025.dcm -------------------------------------------------------------------------------- /tests/test_json/feature.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "floatfeature": 1.1, 3 | "integerfeature": 2 4 | }, 5 | { 6 | "floatfeature": 2.1, 7 | "integerfeature": 3 8 | } 9 | ] 10 | -------------------------------------------------------------------------------- /tests/test_json/label.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "floatlabel": 2.2, 3 | "integerlabel": 3 4 | }, 5 | { 6 | "floatlabel": 1.2, 7 | "integerlabel": 3 8 | 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /third_party/toolchains/gcc10_manylinux2014_aarch64/WORKSPACE: -------------------------------------------------------------------------------- 1 | # DO NOT EDIT: automatically generated WORKSPACE file for cc_autoconf rule 2 | workspace(name = "local_config_cc") 3 | -------------------------------------------------------------------------------- /tests/test_image/IXMtest_A01_s1_w164FBEEF7-F77C-4892-86F5-72D0160D4FB2.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_image/IXMtest_A01_s1_w164FBEEF7-F77C-4892-86F5-72D0160D4FB2.tif -------------------------------------------------------------------------------- /tests/test_avro/cpx.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "cpx", 4 | "fields" : [ 5 | {"name": "re", "type": "double"}, 6 | {"name": "im", "type" : "double"} 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /tests/test_obj/sample.obj: 
-------------------------------------------------------------------------------- 1 | # Simple Wavefront file 2 | v -0.500000 0.000000 0.400000 3 | v -0.500000 0.000000 -0.800000 4 | v -0.500000 1.000000 -0.800000 5 | v -0.500000 1.000000 0.400000 6 | f -4 -3 -2 -1 7 | -------------------------------------------------------------------------------- /tests/test_parquet/part-00000-ca0e89bf-ccd7-47e1-925c-9b42c8716c84-c000.snappy.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/io/HEAD/tests/test_parquet/part-00000-ca0e89bf-ccd7-47e1-925c-9b42c8716c84-c000.snappy.parquet -------------------------------------------------------------------------------- /R-package/R/core_wrappers.R: -------------------------------------------------------------------------------- 1 | #' @title python 2 | #' 3 | #' @description 4 | #' 5 | #' @details 6 | #' 7 | 8 | 9 | #' 10 | #' @export 11 | { 12 | 13 | python_function_result <- tf_io$core$python( 14 | ) 15 | 16 | } 17 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | font_*.h linguist-generated=true 2 | dwaLookups.* linguist-generated=true 3 | b44ExpLogTable.* linguist-generated=true 4 | toFloat.* linguist-generated=true 5 | eLut.* linguist-generated=true 6 | *.inc linguist-generated=true 7 | -------------------------------------------------------------------------------- /third_party/minimp4.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # CC license 4 | 5 | cc_library( 6 | name = "minimp4", 7 | hdrs = [ 8 | "minimp4.h", 9 | ], 10 | copts = [], 11 | ) 12 | -------------------------------------------------------------------------------- /tests/test_azure/start_azure.sh: 
-------------------------------------------------------------------------------- 1 | set -e 2 | set -o pipefail 3 | 4 | 5 | npm install azurite@3.14.3 6 | echo starting azurite-blob 7 | $(npm bin)/azurite-blob --loose & 8 | sleep 10 # Wait for storage emulator to start 9 | echo azurite-blob started successfully 10 | -------------------------------------------------------------------------------- /tests/test_avro/weather.avsc: -------------------------------------------------------------------------------- 1 | {"type": "record", "name": "test.Weather", 2 | "doc": "A weather reading.", 3 | "fields": [ 4 | {"name": "station", "type": "string", "order": "ignore"}, 5 | {"name": "time", "type": "long"}, 6 | {"name": "temp", "type": "int"} 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /third_party/minimp3.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # CC license 4 | 5 | cc_library( 6 | name = "minimp3", 7 | hdrs = [ 8 | "minimp3.h", 9 | "minimp3_ex.h", 10 | ], 11 | copts = [], 12 | ) 13 | -------------------------------------------------------------------------------- /docs/tutorials/avro/train.avsc: -------------------------------------------------------------------------------- 1 | {"name": "ImageDataset", "type": "record", "fields": [{"name": "features", "type": {"type": "array", "items": "int"}}, {"name": "label", "type": ["int", "null"]}, {"name": "dataType", "type": {"type": "enum", "name": "dataTypes", "symbols": ["TRAINING", "VALIDATION"]}}]} -------------------------------------------------------------------------------- /R-package/man/pipe.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/reexports.R 3 | \name{\%>\%} 4 | \alias{\%>\%} 5 | \title{Pipe operator} 6 | 
\usage{ 7 | lhs \%>\% rhs 8 | } 9 | \description{ 10 | See \code{\link[magrittr]{\%>\%}} for more details. 11 | } 12 | \keyword{internal} 13 | -------------------------------------------------------------------------------- /third_party/rapidjson.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # MIT/JSON license 4 | 5 | cc_library( 6 | name = "rapidjson", 7 | srcs = glob([ 8 | "include/**/*.h", 9 | ]), 10 | copts = [], 11 | includes = [ 12 | "include", 13 | ], 14 | ) 15 | -------------------------------------------------------------------------------- /third_party/tinyobjloader.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # MIT license 4 | 5 | cc_library( 6 | name = "tinyobjloader", 7 | srcs = [ 8 | "tiny_obj_loader.cc", 9 | ], 10 | hdrs = [ 11 | "tiny_obj_loader.h", 12 | ], 13 | copts = [], 14 | ) 15 | -------------------------------------------------------------------------------- /tensorflow_io/core/grpc/endpoint.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "google/protobuf/any.proto"; 4 | 5 | message Request { 6 | int64 offset = 1; 7 | int64 length = 2; 8 | } 9 | 10 | message Response { 11 | google.protobuf.Any record = 1; 12 | } 13 | 14 | service GRPCEndpoint { 15 | rpc ReadRecord(Request) returns (Response){} 16 | } 17 | 18 | -------------------------------------------------------------------------------- /third_party/stb.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # MIT License 4 | 5 | cc_library( 6 | name = "stb", 7 | srcs = [ 8 | "stb_image.h", 9 | ], 10 | hdrs = [], 11 | copts = [], 12 | defines 
= [], 13 | linkopts = [], 14 | visibility = ["//visibility:public"], 15 | ) 16 | -------------------------------------------------------------------------------- /third_party/easyexif.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # public domain 4 | 5 | cc_library( 6 | name = "easyexif", 7 | srcs = [ 8 | "exif.cpp", 9 | "exif.h", 10 | ], 11 | hdrs = [], 12 | copts = [], 13 | includes = [], 14 | linkopts = [], 15 | visibility = ["//visibility:public"], 16 | ) 17 | -------------------------------------------------------------------------------- /docs/tutorials/prometheus/prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 1s 3 | evaluation_interval: 1s 4 | alerting: 5 | alertmanagers: 6 | - static_configs: 7 | - targets: 8 | rule_files: 9 | scrape_configs: 10 | - job_name: 'prometheus' 11 | static_configs: 12 | - targets: ['localhost:9090'] 13 | - job_name: "coredns" 14 | static_configs: 15 | - targets: ['localhost:9153'] 16 | -------------------------------------------------------------------------------- /R-package/man/tfio.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/package.R 3 | \docType{package} 4 | \name{tfio} 5 | \alias{tfio} 6 | \title{TensorFlow IO API for R} 7 | \description{ 8 | This library provides an R interface to the 9 | \href{https://github.com/tensorflow/io}{TensorFlow IO} API 10 | that provides datasets and filesystem extensions maintained by SIG-IO. 
11 | } 12 | -------------------------------------------------------------------------------- /tests/test_sql/run.sql: -------------------------------------------------------------------------------- 1 | show server_version; 2 | drop database test_db; 3 | create database test_db; 4 | \c test_db; 5 | drop table test_table; 6 | create table test_table(id bigint PRIMARY KEY, i32 int, i64 bigint, f32 float(4), f64 double precision); 7 | insert into test_table(id, i32, i64, f32, f64) select i, i+1000, i+2000, i+3000, i+4000 from generate_series(0, 9) s(i); 8 | select id, i32, i64, f32, f64 from test_table; 9 | -------------------------------------------------------------------------------- /tests/test_gcloud/testbench/README.md: -------------------------------------------------------------------------------- 1 | # GCS Testbench 2 | 3 | This is a minimal testbench for GCS. It only supports data operation and creating/listing/deleteing bucket. 4 | 5 | ## Install Dependencies 6 | 7 | ```bash 8 | pip install -r requirements.txt 9 | ``` 10 | 11 | ## Run Test Bench 12 | 13 | ```bash 14 | gunicorn --bind "0.0.0.0:9099" --worker-class gevent --chdir "tests/test_gcs/testbench" testbench:application 15 | ``` 16 | -------------------------------------------------------------------------------- /third_party/dlfcn-win32.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # dlfcn-win32 3 | 4 | package(default_visibility = ["//visibility:public"]) 5 | 6 | licenses(["notice"]) # MIT 7 | 8 | exports_files(["COPYING"]) 9 | 10 | cc_library( 11 | name = "dlfcn-win32", 12 | srcs = [ 13 | "dlfcn.c", 14 | "dlfcn.h", 15 | ], 16 | hdrs = [], 17 | defines = [], 18 | includes = [ 19 | ".", 20 | ], 21 | deps = [], 22 | ) 23 | -------------------------------------------------------------------------------- /.github/workflows/build.space.sh: -------------------------------------------------------------------------------- 1 | # Free disk space 
on Linux 2 | #sudo swapoff /swapfile 3 | #sudo rm -rf /swapfile 4 | sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc 5 | #sudo apt-get remove php* ruby-* subversion mongodb-org -yq >/dev/null 2>&1 6 | sudo apt-get autoremove -y >/dev/null 2>&1 7 | sudo apt-get autoclean -y >/dev/null 2>&1 8 | sudo rm -rf /usr/local/lib/android >/dev/null 2>&1 9 | docker rmi $(docker image ls -aq) >/dev/null 2>&1 10 | -------------------------------------------------------------------------------- /R-package/cran-comments.md: -------------------------------------------------------------------------------- 1 | ## Test environments 2 | 3 | * local OS X install, R 3.5 4 | * ubuntu 14.04 (on travis-ci), R 3.5 5 | * ubuntu 18.04 (on travis-ci), R 3.5 6 | * win-builder (devel) 7 | 8 | ## R CMD check results 9 | 10 | ``` 11 | 0 errors | 0 warnings | 0 note 12 | ``` 13 | 14 | ## Comments 15 | 16 | This is a minor release to resolve warnings and notes in CRAN check result since the required environments are not available on CRAN test machines. 17 | -------------------------------------------------------------------------------- /tests/test_genome/test.fastq: -------------------------------------------------------------------------------- 1 | @NODESC:header 2 | GATTACA 3 | + 4 | BB>B@FA 5 | @M01321:49:000000000-A6HWP:1:1101:17009:2216 1:N:0:1 6 | CGTTAGCGCAGGGGGCATCTTCACACTGGTGACAGGTAACCGCCGTAGTAAAGGTTCCGCCTTTCACT 7 | + 8 | AAAAABF@BBBDGGGG?FFGFGHBFBFBFABBBHGGGFHHCEFGGGGG?FGFFHEDG3EFGGGHEGHG 9 | @FASTQ contains multiple spaces in description 10 | CGGCTGGTCAGGCTGACATCGCCGCCGGCCTGCAGCGAGCCGCTGC 11 | + 12 | FAFAF;F/9;.:/;999B/9A.DFFF;-->.AAB/FC;9-@-=;=. 
13 | @FASTQ_with_trailing_space 14 | CGG 15 | + 16 | FAD 17 | -------------------------------------------------------------------------------- /R-package/tfio.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | 18 | BuildType: Package 19 | PackageUseDevtools: Yes 20 | PackageInstallArgs: --no-multiarch --with-keep.source 21 | PackageRoxygenize: rd,collate,namespace 22 | -------------------------------------------------------------------------------- /third_party/hadoop.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # A library for decoding and encoding GIF images 3 | 4 | licenses(["notice"]) # Apache 2.0 5 | 6 | exports_files(["LICENSE.txt"]) 7 | 8 | cc_library( 9 | name = "hadoop", 10 | hdrs = ["hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/include/hdfs/hdfs.h"], 11 | copts = [], 12 | includes = ["hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/include"], 13 | visibility = ["//visibility:public"], 14 | ) 15 | -------------------------------------------------------------------------------- /third_party/bzip2.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # BSD-like license 4 | 5 | cc_library( 6 | name = "bzip2", 7 | srcs = [ 8 | "blocksort.c", 9 | "bzlib.c", 10 | "bzlib_private.h", 11 | "compress.c", 12 | "crctable.c", 13 | "decompress.c", 14 | "huffman.c", 15 | "randtable.c", 16 | ], 17 | hdrs = [ 18 | "bzlib.h", 19 | ], 20 | copts = [ 21 | ], 22 | includes = ["."], 23 | 
) 24 | -------------------------------------------------------------------------------- /tools/build/auditwheel: -------------------------------------------------------------------------------- 1 | TF_SHARED_LIBRARY_NAME=$(grep -r TF_SHARED_LIBRARY_NAME .bazelrc | awk -F= '{print$2}') 2 | 3 | POLICY_JSON=$(find / -name manylinux-policy.json) 4 | 5 | sed -i "s/libresolv.so.2\"/libresolv.so.2\", $TF_SHARED_LIBRARY_NAME, \"libavformat.so.58\", \"libswscale.so.5\", \"libavformat.so.57\", \"libavutil.so.55\", \"libswscale.so.4\", \"libavformat-ffmpeg.so.56\", \"libavcodec-ffmpeg.so.56\", \"libavutil-ffmpeg.so.54\", \"libswscale-ffmpeg.so.3\"/g" $POLICY_JSON 6 | 7 | cat $POLICY_JSON 8 | 9 | auditwheel $@ 10 | -------------------------------------------------------------------------------- /R-package/docs/extra.js: -------------------------------------------------------------------------------- 1 | 2 | $(document).ready(function() { 3 | 4 | // turn functions section into ref-table 5 | $('#functions').find('table').attr('class', 'ref-index'); 6 | 7 | // are we in examples? 8 | var examples = window.location.href.match("/articles/examples/") !== null; 9 | if (examples) { 10 | $('.template-vignette').addClass('examples'); 11 | 12 | // remove right column 13 | $(".col-md-9").removeClass("col-md-9").addClass('col-md-10'); 14 | $(".col-md-3").remove(); 15 | 16 | } 17 | }); 18 | -------------------------------------------------------------------------------- /R-package/man/from_schema.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arrow_dataset.R 3 | \name{from_schema} 4 | \alias{from_schema} 5 | \title{Create an Arrow Dataset from the given Arrow schema.} 6 | \usage{ 7 | from_schema(object, ...) 
8 | } 9 | \arguments{ 10 | \item{object}{An \R object.} 11 | 12 | \item{...}{Optional arguments passed on to implementing methods.} 13 | } 14 | \description{ 15 | Infer output types and shapes from the given Arrow schema and create an Arrow 16 | Dataset. 17 | } 18 | -------------------------------------------------------------------------------- /R-package/pkgdown/extra.js: -------------------------------------------------------------------------------- 1 | 2 | $(document).ready(function() { 3 | 4 | // turn functions section into ref-table 5 | $('#functions').find('table').attr('class', 'ref-index'); 6 | 7 | // are we in examples? 8 | var examples = window.location.href.match("/articles/examples/") !== null; 9 | if (examples) { 10 | $('.template-vignette').addClass('examples'); 11 | 12 | // remove right column 13 | $(".col-md-9").removeClass("col-md-9").addClass('col-md-10'); 14 | $(".col-md-3").remove(); 15 | 16 | } 17 | }); 18 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # TensorFlow I/O Security 2 | 3 | Please refer to [TensorFlow’s security model and guidelines][tf-security]. 4 | 5 | To report any security related issues, please email `security@tensorflow.org` 6 | [as described in TensorFlow’s `SECURITY.md`][email]. Consult that document for 7 | details, including an encryption key for especially sensitive disclosures. 
8 | 9 | [email]: https://github.com/tensorflow/tensorflow/blob/master/SECURITY.md#reporting-vulnerabilities 10 | [tf-security]: https://github.com/tensorflow/tensorflow/blob/master/SECURITY.md 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.bazelrc 2 | /bazel-* 3 | /artifacts 4 | /get-pip.py 5 | .DS_Store 6 | *.pyc 7 | __pycache__ 8 | *.swp 9 | *.whl 10 | 11 | # IDE specific 12 | .vscode/ 13 | .idea/ 14 | *.pbxproj 15 | *.xcworkspacedata 16 | .ipynb_checkpoints 17 | 18 | # Auto-generated files by `R CMD check` 19 | tfio.Rcheck/ 20 | tfio_*.tar.gz 21 | .Rproj.user 22 | *.Rcheck 23 | .cache 24 | 25 | # Setuptools 26 | *.egg-info 27 | /build 28 | /dist 29 | /wheelhouse 30 | 31 | # Lint 32 | .pylint 33 | 34 | # Prometheus 35 | .coredns 36 | .prometheus 37 | 38 | # Kafka 39 | /confluent* -------------------------------------------------------------------------------- /R-package/scripts/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM r-base 2 | COPY . . 
3 | 4 | RUN apt-get update && \ 5 | apt-get install -y --no-install-recommends \ 6 | build-essential \ 7 | python-dev \ 8 | python-setuptools \ 9 | python-pip && \ 10 | rm -rf /var/lib/apt/lists/* 11 | 12 | # Dependencies 13 | RUN pip install tensorflow-io 14 | RUN Rscript -e 'install.packages(c("Rcpp", "reticulate", "knitr", "tensorflow", "tfdatasets", "forge", "tidyselect"))' 15 | 16 | # tfio package installation 17 | RUN R CMD build R-package/ 18 | RUN R CMD INSTALL tfio_*.gz 19 | -------------------------------------------------------------------------------- /third_party/aws-c-event-stream.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # AWS C Event Stream 3 | 4 | package(default_visibility = ["//visibility:public"]) 5 | 6 | licenses(["notice"]) # Apache 2.0 7 | 8 | exports_files(["LICENSE"]) 9 | 10 | cc_library( 11 | name = "aws-c-event-stream", 12 | srcs = glob([ 13 | "include/**/*.h", 14 | "source/**/*.c", 15 | ]), 16 | hdrs = [ 17 | ], 18 | defines = [], 19 | includes = [ 20 | "include", 21 | ], 22 | deps = [ 23 | "@aws-c-common", 24 | "@aws-checksums", 25 | ], 26 | ) 27 | -------------------------------------------------------------------------------- /R-package/R/dataset_utils.R: -------------------------------------------------------------------------------- 1 | as_tf_dataset <- function (dataset, tags = NULL) { 2 | if (!is_dataset(dataset)) 3 | stop("Provided dataset is not a TensorFlow Dataset") 4 | if (!inherits(dataset, "tf_dataset")) 5 | class(dataset) <- c("tf_dataset", class(dataset), tags) 6 | dataset 7 | } 8 | 9 | is_dataset <- function (x) { 10 | inherits(x, "tensorflow.python.data.ops.dataset_ops.Dataset") || 11 | inherits(x, "tensorflow.python.data.ops.dataset_ops.DatasetV2") || 12 | is_tfio_dataset(x) 13 | } 14 | 15 | is_tfio_dataset <- function(x) { 16 | grepl("tensorflow_io", class(x)) 17 | } 18 | 
-------------------------------------------------------------------------------- /third_party/libyuv.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # libyuv library from Chromium 3 | 4 | licenses(["notice"]) 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "libyuv", 10 | srcs = glob([ 11 | "include/libyuv/*.h", 12 | "source/row_*.cc", 13 | "source/scale_*.cc", 14 | ]) + [ 15 | "source/convert_argb.cc", 16 | "source/convert_from_argb.cc", 17 | "source/cpu_id.cc", 18 | "source/planar_functions.cc", 19 | ], 20 | includes = [ 21 | "include", 22 | ], 23 | visibility = ["//visibility:public"], 24 | ) 25 | -------------------------------------------------------------------------------- /third_party/brotli.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # Brotli library 3 | 4 | licenses(["notice"]) # MIT license 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "brotli", 10 | srcs = glob([ 11 | "c/common/*.c", 12 | "c/common/*.h", 13 | "c/dec/*.c", 14 | "c/dec/*.h", 15 | "c/enc/*.c", 16 | "c/enc/*.h", 17 | "c/include/brotli/*.h", 18 | ]), 19 | hdrs = [], 20 | defines = [], 21 | includes = [ 22 | "c/dec", 23 | "c/include", 24 | ], 25 | linkopts = [], 26 | visibility = ["//visibility:public"], 27 | ) 28 | -------------------------------------------------------------------------------- /tensorflow_io/core/filesystems/oss/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "oss", 12 | srcs = [ 13 | "oss_filesystem.cc", 14 | "oss_filesystem.h", 15 | ], 16 | copts = tf_io_copts(), 17 | linkstatic = True, 18 | deps = [ 19 | 
"//tensorflow_io/core/filesystems:filesystem_plugins_header", 20 | "@aliyun_oss_c_sdk", 21 | "@local_config_tf//:tf_header_lib", 22 | ], 23 | alwayslink = 1, 24 | ) 25 | -------------------------------------------------------------------------------- /R-package/man/mnist_image_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mnist_dataset.R 3 | \name{mnist_image_dataset} 4 | \alias{mnist_image_dataset} 5 | \title{Creates a \code{MNISTImageDataset}.} 6 | \usage{ 7 | mnist_image_dataset(filenames, compression_type = NULL) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | 12 | \item{compression_type}{A \code{tf.string} scalar evaluating to one 13 | of \code{""} (no compression), \code{"ZLIB"}, or \code{"GZIP"}.} 14 | } 15 | \description{ 16 | This creates a dataset for MNIST images. 17 | } 18 | -------------------------------------------------------------------------------- /R-package/man/mnist_label_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mnist_dataset.R 3 | \name{mnist_label_dataset} 4 | \alias{mnist_label_dataset} 5 | \title{Creates a \code{MNISTLabelDataset}.} 6 | \usage{ 7 | mnist_label_dataset(filenames, compression_type = NULL) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | 12 | \item{compression_type}{A \code{tf.string} scalar evaluating to one 13 | of \code{""} (no compression), \code{"ZLIB"}, or \code{"GZIP"}.} 14 | } 15 | \description{ 16 | This creates a dataset for MNIST labels. 
17 | } 18 | -------------------------------------------------------------------------------- /third_party/libexpat.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # Expat library 3 | 4 | licenses(["notice"]) 5 | 6 | exports_files(["COPYING"]) 7 | 8 | cc_library( 9 | name = "libexpat", 10 | srcs = [ 11 | "lib/xmlparse.c", 12 | "lib/xmlrole.c", 13 | "lib/xmltok.c", 14 | ], 15 | hdrs = glob([ 16 | "lib/*.h", 17 | ]) + [ 18 | "lib/xmltok_impl.c", 19 | "lib/xmltok_ns.c", 20 | ], 21 | copts = [ 22 | "-DHAVE_MEMMOVE", 23 | "-DXML_POOR_ENTROPY", 24 | ], 25 | includes = [ 26 | "lib", 27 | ], 28 | visibility = ["//visibility:public"], 29 | deps = [], 30 | ) 31 | -------------------------------------------------------------------------------- /tensorflow_io/core/filesystems/az/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "az", 12 | srcs = [ 13 | "az_filesystem.cc", 14 | ], 15 | copts = tf_io_copts(), 16 | linkstatic = True, 17 | deps = [ 18 | "//tensorflow_io/core/filesystems:filesystem_plugins_header", 19 | "@com_github_azure_azure_sdk_for_cpp//:azure", 20 | "@com_google_absl//absl/strings", 21 | "@local_tsl//tsl/c:tsl_status", 22 | ], 23 | alwayslink = 1, 24 | ) 25 | -------------------------------------------------------------------------------- /tools/build/swift/BUILD: -------------------------------------------------------------------------------- 1 | load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | swift_library( 6 | name = "audio_video_swift", 7 | srcs = [ 8 | "//tensorflow_io/core:swift/audio.swift", 9 | "//tensorflow_io/core:swift/video.swift", 10 | ], 11 | copts = [ 
12 | "-target", 13 | "x86_64-apple-macosx10.14", 14 | ], 15 | linkopts = [ 16 | "-L/usr/lib/swift", 17 | "-Wl,-rpath,/usr/lib/swift", 18 | "-target", 19 | "x86_64-apple-macosx10.14", 20 | ], 21 | module_name = "audio_video", 22 | alwayslink = True, 23 | ) 24 | -------------------------------------------------------------------------------- /tensorflow_io/core/filesystems/hdfs/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "hdfs", 12 | srcs = [ 13 | "hadoop_filesystem.cc", 14 | ], 15 | copts = tf_io_copts(), 16 | linkstatic = True, 17 | deps = [ 18 | "//tensorflow_io/core/filesystems:filesystem_plugins_header", 19 | "@com_google_absl//absl/strings", 20 | "@com_google_absl//absl/synchronization", 21 | "@hadoop", 22 | "@local_tsl//tsl/c:tsl_status", 23 | ], 24 | alwayslink = 1, 25 | ) 26 | -------------------------------------------------------------------------------- /third_party/lmdb.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # LMDB is the Lightning Memory-mapped Database. 
3 | 4 | licenses(["notice"]) # OpenLDAP Public License 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "lmdb", 10 | srcs = [ 11 | "mdb.c", 12 | "midl.c", 13 | ], 14 | hdrs = [ 15 | "lmdb.h", 16 | "midl.h", 17 | ], 18 | copts = [ 19 | "-w", 20 | ], 21 | linkopts = select({ 22 | "@bazel_tools//src/conditions:windows": ["-DEFAULTLIB:advapi32.lib"], # InitializeSecurityDescriptor, SetSecurityDescriptorDacl 23 | "//conditions:default": ["-lpthread"], 24 | }), 25 | visibility = ["//visibility:public"], 26 | ) 27 | -------------------------------------------------------------------------------- /R-package/man/pubsub_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/pubsub_dataset.R 3 | \name{pubsub_dataset} 4 | \alias{pubsub_dataset} 5 | \title{Creates a \code{PubSubDataset}.} 6 | \usage{ 7 | pubsub_dataset(subscriptions, server = NULL, eof = FALSE, timeout = 1000) 8 | } 9 | \arguments{ 10 | \item{subscriptions}{A \code{tf.string} tensor containing one or more 11 | subscriptions.} 12 | 13 | \item{server}{The pubsub server.} 14 | 15 | \item{eof}{If True, the pubsub reader will stop on EOF.} 16 | 17 | \item{timeout}{The timeout value for the PubSub to wait (in millisecond).} 18 | } 19 | \description{ 20 | This creates a dataset for consuming PubSub messages. 
21 | } 22 | -------------------------------------------------------------------------------- /tensorflow_io/core/filesystems/http/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "http", 12 | srcs = [ 13 | "http_filesystem.cc", 14 | ], 15 | copts = tf_io_copts(), 16 | linkstatic = True, 17 | deps = [ 18 | "//tensorflow_io/core/filesystems:filesystem_plugins_header", 19 | "@com_google_absl//absl/strings", 20 | "@com_google_absl//absl/synchronization", 21 | "@com_google_absl//absl/time", 22 | "@curl", 23 | "@local_tsl//tsl/c:tsl_status", 24 | ], 25 | alwayslink = 1, 26 | ) 27 | -------------------------------------------------------------------------------- /R-package/docs/articles/introduction_files/elevate-section-attrs-2.0/elevate-section-attrs.js: -------------------------------------------------------------------------------- 1 | $(function() { 2 | $("div.section[class*='level'], section[class*='level']").each(function(i, el) { 3 | var $section = $(el); 4 | var $header = $section.children().filter(":header").first(); 5 | if ($header.length === 0) return; 6 | var attrs = $header[0].attributes; 7 | for (var a = 0; a < attrs.length; a++) { 8 | var nm = attrs[a].name; 9 | var val = attrs[a].value; 10 | if (nm === "class") { 11 | $section.addClass(val); 12 | $header.removeClass(val); 13 | continue; 14 | } 15 | $section.attr(nm, val); 16 | $header.attr(nm, null); 17 | } 18 | }); 19 | }); 20 | 21 | -------------------------------------------------------------------------------- /third_party/avro.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # Avro Library 3 | 4 | licenses(["notice"]) # Apache 2.0 5 | 6 | exports_files(["LICENSE.txt"]) 7 | 8 | cc_library( 9 | 
name = "avro", 10 | srcs = glob( 11 | [ 12 | "api/**/*.hh", 13 | "impl/**/*.hh", 14 | "impl/**/*.cc", 15 | ], 16 | exclude = [ 17 | "impl/avrogencpp.cc", 18 | ], 19 | ), 20 | hdrs = [], 21 | copts = [], 22 | defines = [ 23 | "SNAPPY_CODEC_AVAILABLE", 24 | ], 25 | includes = [ 26 | "api", 27 | ], 28 | linkopts = [], 29 | visibility = ["//visibility:public"], 30 | deps = [ 31 | "@boost", 32 | "@snappy", 33 | ], 34 | ) 35 | -------------------------------------------------------------------------------- /.github/workflows/build.instruction.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sudo = False 4 | if sys.argv[1].startswith("--sudo="): 5 | sudo = sys.argv[1][7:].lower() == "true" 6 | 7 | source = sys.argv[len(sys.argv) - 2] 8 | section = sys.argv[len(sys.argv) - 1] 9 | with open(source, "r") as f: 10 | lines = [line.rstrip() for line in list(f)] 11 | 12 | # Remove lines before section title 13 | lines = lines[lines.index(section) :] 14 | 15 | # Remove lines outside (including) "```sh" and "```" 16 | lines = lines[lines.index("```sh") + 1 : lines.index("```")] 17 | 18 | # Remove sudo 19 | if not sudo: 20 | lines = [ 21 | (line[len("sudo ") :] if line.startswith("sudo ") else line) for line in lines 22 | ] 23 | 24 | print("\n".join(lines)) 25 | -------------------------------------------------------------------------------- /third_party/BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | exports_files(["libmemcached.mem_config.h"]) 4 | 5 | cc_library( 6 | name = "openexr", 7 | srcs = [ 8 | "openexr/b44ExpLogTable.h", 9 | "openexr/dwaLookups.h", 10 | "openexr/eLut.h", 11 | "openexr/toFloat.h", 12 | ], 13 | hdrs = [], 14 | copts = [], 15 | includes = ["openexr"], 16 | visibility = ["//visibility:public"], 17 | deps = [], 18 | ) 19 | 20 | cc_library( 21 | name = "font", 22 | srcs = [ 23 | 
"font/font_opensans_regular.h", 24 | ], 25 | hdrs = [], 26 | copts = [], 27 | includes = ["font"], 28 | visibility = ["//visibility:public"], 29 | deps = [], 30 | ) 31 | -------------------------------------------------------------------------------- /third_party/fmjpeg2koj.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # JPEG2000 codec for DCMTK based on openjpeg 3 | 4 | licenses(["notice"]) # Apache 2.0 license 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "fmjpeg2koj", 10 | srcs = glob([ 11 | "include/fmjpeg2k/*.h", 12 | "*.cc", 13 | "*.cpp", 14 | ]), 15 | copts = select({ 16 | "@bazel_tools//src/conditions:windows": [], 17 | "//conditions:default": [ 18 | "-Wno-register", 19 | "-Wno-error", 20 | ], 21 | }), 22 | includes = [ 23 | "include", 24 | ], 25 | visibility = ["//visibility:public"], 26 | deps = [ 27 | "@dcmtk", 28 | "@openjpeg", 29 | ], 30 | ) 31 | -------------------------------------------------------------------------------- /third_party/lz4.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # LZ4 library 3 | 4 | licenses(["notice"]) # BSD license 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "lz4", 10 | srcs = glob([ 11 | "lib/lz4.c", 12 | "lib/lz4.h", 13 | "lib/lz4frame.c", 14 | "lib/lz4frame.h", 15 | "lib/lz4hc.h", 16 | "lib/lz4hc.c", 17 | "lib/xxhash.h", 18 | ]), 19 | hdrs = [], 20 | defines = [ 21 | "XXH_PRIVATE_API", 22 | "LZ4LIB_VISIBILITY=", 23 | ], 24 | includes = [ 25 | "lib", 26 | ], 27 | linkopts = [], 28 | textual_hdrs = [ 29 | "lib/xxhash.c", 30 | "lib/lz4.c", 31 | ], 32 | visibility = ["//visibility:public"], 33 | ) 34 | -------------------------------------------------------------------------------- /tests/test_parquet/parquet_cpp_example.patch: -------------------------------------------------------------------------------- 1 | diff -Naur 
a/examples/low-level-api/reader-writer.cc b/examples/low-level-api/reader-writer.cc 2 | --- a/examples/low-level-api/reader-writer.cc 2019-10-09 05:21:28.534220084 +0000 3 | +++ b/examples/low-level-api/reader-writer.cc 2019-10-09 06:04:53.748332246 +0000 4 | @@ -139,7 +139,7 @@ 5 | hello[7] = static_cast(static_cast('0') + i / 100); 6 | hello[8] = static_cast(static_cast('0') + (i / 10) % 10); 7 | hello[9] = static_cast(static_cast('0') + i % 10); 8 | - if (i % 2 == 0) { 9 | + if (1) { 10 | int16_t definition_level = 1; 11 | value.ptr = reinterpret_cast(&hello[0]); 12 | value.len = FIXED_LENGTH; 13 | -------------------------------------------------------------------------------- /tensorflow_io/python/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | -------------------------------------------------------------------------------- /third_party/expand_template.bzl: -------------------------------------------------------------------------------- 1 | """ 2 | Helper rule used by libmemcached.BUILD to build libmemcached's 3 | depdendency on headers/libmemcached-1.0/configure.h 4 | """ 5 | 6 | def _expand_template_impl(ctx): 7 | ctx.actions.expand_template( 8 | template = ctx.file.template, 9 | output = ctx.outputs.out, 10 | substitutions = ctx.attr.substitutions, 11 | ) 12 | 13 | expand_template = rule( 14 | attrs = { 15 | "out": attr.output(mandatory = True), 16 | "substitutions": attr.string_dict(mandatory = True), 17 | "template": attr.label( 18 | mandatory = True, 19 | allow_single_file = True, 20 | ), 21 | }, 22 | output_to_genfiles = True, 23 | implementation = _expand_template_impl, 24 | ) 25 | -------------------------------------------------------------------------------- /R-package/tests/testthat/utils.R: -------------------------------------------------------------------------------- 1 | library(tensorflow) 2 | 3 | skip_if_no_tensorflow_io <- function() { 4 | if (!identical(Sys.getenv("NOT_CRAN"), "true")) { 5 | skip("Skip tests on CRAN due to lack of proper Python setup") 6 | } 7 | if (!reticulate::py_module_available("tensorflow_io")) 8 | skip("tensorflow_io Python module is not available for testing") 9 | } 10 | 11 | test_succeeds <- function(desc, expr) { 12 | test_that(desc, { 13 | skip_if_no_tensorflow_io() 14 | expect_error(force(expr), NA) 15 | }) 16 | } 17 | 18 | iterate_all_batches <- function(dataset) { 19 | sess <- tf$Session() 20 | iterator <- make_iterator_one_shot(dataset) 21 | next_batch <- iterator_get_next(iterator) 22 | 23 | until_out_of_range({ 24 | sess$run(next_batch) 25 | }) 26 | } 27 | -------------------------------------------------------------------------------- /third_party/mxml.BUILD: 
-------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | cc_library( 4 | name = "mxml", 5 | srcs = [ 6 | "config.h", 7 | "mxml-attr.c", 8 | "mxml-entity.c", 9 | "mxml-file.c", 10 | "mxml-get.c", 11 | "mxml-index.c", 12 | "mxml-node.c", 13 | "mxml-private.c", 14 | "mxml-private.h", 15 | "mxml-search.c", 16 | "mxml-set.c", 17 | "mxml-string.c", 18 | ], 19 | hdrs = [ 20 | "mxml.h", 21 | ], 22 | copts = ["-pthread"], 23 | defines = [ 24 | "_GNU_SOURCE", 25 | "_THREAD_SAFE", 26 | "_REENTRANT", 27 | ], 28 | includes = [ 29 | ".", 30 | ], 31 | linkopts = ["-lpthread"], 32 | visibility = ["//visibility:public"], 33 | ) 34 | -------------------------------------------------------------------------------- /third_party/zlib.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # BSD/MIT-like license (for zlib) 4 | 5 | cc_library( 6 | name = "zlib", 7 | srcs = glob([ 8 | "*.c", 9 | "*.h", 10 | ]) + [ 11 | "contrib/minizip/ioapi.c", 12 | "contrib/minizip/ioapi.h", 13 | "contrib/minizip/unzip.c", 14 | "contrib/minizip/unzip.h", 15 | ], 16 | hdrs = [ 17 | "zlib.h", 18 | ], 19 | copts = select({ 20 | "@bazel_tools//src/conditions:windows": [], 21 | "//conditions:default": [ 22 | "-Wno-shift-negative-value", 23 | "-DZ_HAVE_UNISTD_H", 24 | ], 25 | }), 26 | includes = [ 27 | ".", 28 | "contrib/minizip", 29 | ], 30 | ) 31 | -------------------------------------------------------------------------------- /tensorflow_io/core/grpc/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """grpc""" 16 | 17 | import endpoint_pb2 18 | -------------------------------------------------------------------------------- /R-package/man/tiff_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/image_dataset.R 3 | \name{tiff_dataset} 4 | \alias{tiff_dataset} 5 | \title{Create a \code{TIFFDataset}.} 6 | \usage{ 7 | tiff_dataset(filenames) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | } 12 | \description{ 13 | A TIFF Image File Dataset that reads the TIFF file. 
14 | } 15 | \examples{ 16 | \dontrun{ 17 | dataset <- tiff_dataset( 18 | filenames = list("testdata/small.tiff")) \%>\% 19 | dataset_repeat(1) 20 | 21 | sess <- tf$Session() 22 | iterator <- make_iterator_one_shot(dataset) 23 | next_batch <- iterator_get_next(iterator) 24 | 25 | until_out_of_range({ 26 | batch <- sess$run(next_batch) 27 | print(batch) 28 | }) 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /R-package/man/webp_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/image_dataset.R 3 | \name{webp_dataset} 4 | \alias{webp_dataset} 5 | \title{Create a \code{WebPDataset}.} 6 | \usage{ 7 | webp_dataset(filenames) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | } 12 | \description{ 13 | A WebP Image File Dataset that reads the WebP file. 14 | } 15 | \examples{ 16 | \dontrun{ 17 | dataset <- webp_dataset( 18 | filenames = list("testdata/sample.webp")) \%>\% 19 | dataset_repeat(1) 20 | 21 | sess <- tf$Session() 22 | iterator <- make_iterator_one_shot(dataset) 23 | next_batch <- iterator_get_next(iterator) 24 | 25 | until_out_of_range({ 26 | batch <- sess$run(next_batch) 27 | print(batch) 28 | }) 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /tensorflow_io/core/filesystems/s3/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "s3", 12 | srcs = [ 13 | "aws_logging.cc", 14 | "aws_logging.h", 15 | "s3_filesystem.cc", 16 | "s3_filesystem.h", 17 | ], 18 | copts = tf_io_copts(), 19 | linkstatic = True, 20 | deps = [ 21 | 
"//tensorflow_io/core/filesystems:filesystem_plugins_header", 22 | "@aws-sdk-cpp//:s3", 23 | "@aws-sdk-cpp//:transfer", 24 | "@com_google_absl//absl/strings", 25 | "@com_google_absl//absl/synchronization", 26 | "@local_tsl//tsl/c:tsl_status", 27 | ], 28 | alwayslink = 1, 29 | ) 30 | -------------------------------------------------------------------------------- /third_party/vorbis.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # BSD license 4 | 5 | cc_library( 6 | name = "vorbis", 7 | srcs = glob( 8 | [ 9 | "lib/**/*.h", 10 | "lib/**/*.c", 11 | ], 12 | exclude = [ 13 | "lib/barkmel.c", 14 | "lib/psytune.c", 15 | "lib/tone.c", 16 | "lib/misc.c", 17 | ], 18 | ) + select({ 19 | "@bazel_tools//src/conditions:windows": [], 20 | "//conditions:default": [ 21 | "lib/misc.c", 22 | ], 23 | }), 24 | hdrs = glob([ 25 | "include/vorbis/*.h", 26 | ]), 27 | copts = [], 28 | includes = [ 29 | "include", 30 | "lib", 31 | ], 32 | deps = [ 33 | "@ogg", 34 | ], 35 | ) 36 | -------------------------------------------------------------------------------- /third_party/libavif.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # AVIF library 3 | 4 | licenses(["notice"]) # BSD license 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "libavif", 10 | srcs = glob( 11 | [ 12 | "include/avif/*.h", 13 | "src/*.c", 14 | ], 15 | exclude = [ 16 | "src/codec_aom.c", 17 | "src/codec_rav1e.c", 18 | ], 19 | ), 20 | hdrs = [], 21 | defines = [ 22 | #"AVIF_CODEC_AOM=1", 23 | "AVIF_CODEC_DAV1D=1", 24 | "AVIF_CODEC_LIBGAV1=1", 25 | #"AVIF_CODEC_RAV1E=1", 26 | ], 27 | includes = [ 28 | "include", 29 | ], 30 | visibility = ["//visibility:public"], 31 | deps = [ 32 | "@dav1d//:dav1d16", 33 | "@dav1d//:dav1d8", 34 | "@libgav1", 35 | ], 36 | ) 37 | 
-------------------------------------------------------------------------------- /R-package/index.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "R interface to TensorFlow IO" 3 | output: html_document 4 | type: docs 5 | repo: https://github.com/tensorflow/io 6 | menu: 7 | main: 8 | name: "Overview" 9 | identifier: "tfio-overview" 10 | parent: "tfio-using-tfio" 11 | weight: 10 12 | --- 13 | 14 | ```{r setup, include=FALSE} 15 | knitr::opts_chunk$set(eval = FALSE, echo = TRUE) 16 | ``` 17 | 18 | ## Overview 19 | 20 | This is the R interface to datasets and filesystem extensions maintained by SIG-IO. Some example data sources that TensorFlow I/O supports are: 21 | 22 | * Data source for Apache Ignite and Ignite File System (IGFS). 23 | * Apache Kafka stream-processing. 24 | * Amazon Kinesis data streams. 25 | * Hadoop SequenceFile format. 26 | * Video file format such as mp4. 27 | * Apache Parquet format. 28 | * Image file format such as WebP. 29 | -------------------------------------------------------------------------------- /R-package/docs/link.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 8 | 12 | 13 | -------------------------------------------------------------------------------- /tensorflow_io/python/ops/version_ops.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """version_ops""" 16 | 17 | version = "0.37.1" 18 | require = "tensorflow>=2.16.0,<2.17.0" 19 | -------------------------------------------------------------------------------- /R-package/man/lmdb_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lmdb_dataset.R 3 | \name{lmdb_dataset} 4 | \alias{lmdb_dataset} 5 | \title{Create a \code{LMDBDataset}.} 6 | \usage{ 7 | lmdb_dataset(filenames) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | } 12 | \description{ 13 | This function allows a user to read data from a LMDB 14 | file. A lmdb file consists of (key value) pairs sequentially. 15 | } 16 | \examples{ 17 | \dontrun{ 18 | dataset <- sequence_file_dataset("testdata/data.mdb") \%>\% 19 | dataset_repeat(1) 20 | 21 | sess <- tf$Session() 22 | iterator <- make_iterator_one_shot(dataset) 23 | next_batch <- iterator_get_next(iterator) 24 | 25 | until_out_of_range({ 26 | batch <- sess$run(next_batch) 27 | print(batch) 28 | }) 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /tools/docker/build.Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION 2 | ARG TENSORFLOW_VERSION 3 | FROM tensorflow/build:${TENSORFLOW_VERSION}-python$PYTHON_VERSION 4 | 5 | ARG PYTHON_VERSION 6 | ARG TENSORFLOW_VERSION 7 | ARG BAZEL_OPTIMIZATION 8 | 9 | ADD . 
/opt/io 10 | WORKDIR /opt/io 11 | 12 | RUN python${PYTHON_VERSION} -m pip install $(grep tensorflow tensorflow_io/python/ops/version_ops.py | cut -d '"' -f 2) 13 | 14 | RUN python$PYTHON_VERSION -m pip uninstall -y tensorflow-io-gcs-filesystem 15 | 16 | RUN python$PYTHON_VERSION tools/build/configure.py 17 | 18 | RUN cat .bazelrc 19 | 20 | RUN TF_PYTHON_VERSION=${PYTHON_VERSION} bazel build --copt="-fPIC" --crosstool_top=@ubuntu20.04-gcc9_manylinux2014-cuda11.8-cudnn8.6-tensorrt8.4_config_cuda//crosstool:toolchain --noshow_progress --verbose_failures ${BAZEL_OPTIMIZATION} -- //tensorflow_io/... //tensorflow_io_gcs_filesystem/... 21 | -------------------------------------------------------------------------------- /tools/lint/black_python.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """black""" 16 | import sys 17 | import black 18 | 19 | print("black: ", sys.argv) 20 | black.main(sys.argv[1:]) 21 | -------------------------------------------------------------------------------- /docs/_toc.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # 15 | # ============================================================================== 16 | toc: 17 | - title: Overview 18 | path: /io/overview 19 | - title: "Configuring Visual Studio Code" 20 | path: /io/vscode 21 | -------------------------------------------------------------------------------- /tensorflow_io_gcs_filesystem/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io_gcs_filesystem""" 16 | 17 | from tensorflow_io_gcs_filesystem.core.python.ops import plugin_gs 18 | -------------------------------------------------------------------------------- /third_party/xsimd.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # BSD 3-Clause 4 | 5 | exports_files(["LICENSE"]) 6 | 7 | cc_library( 8 | name = "xsimd", 9 | srcs = [], 10 | hdrs = glob( 11 | [ 12 | "include/xsimd/*.hpp", 13 | "include/xsimd/arch/*.hpp", 14 | "include/xsimd/arch/generic/*.hpp", 15 | "include/xsimd/config/*.hpp", 16 | "include/xsimd/math/*.hpp", 17 | "include/xsimd/memory/*.hpp", 18 | "include/xsimd/stl/*.hpp", 19 | "include/xsimd/types/*.hpp", 20 | ], 21 | exclude = [ 22 | ], 23 | ), 24 | copts = [], 25 | defines = [], 26 | includes = [ 27 | "include", 28 | ], 29 | linkopts = [], 30 | visibility = ["//visibility:public"], 31 | deps = [ 32 | ], 33 | ) 34 | -------------------------------------------------------------------------------- /third_party/toolchains/gcc7_manylinux2010/cc_wrapper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright 2015 The Bazel Authors. All rights reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | # 17 | # Ship the environment to the C++ action 18 | # 19 | set -eu 20 | 21 | # Set-up the environment 22 | 23 | 24 | # Call the C++ compiler 25 | /dt7/usr/bin/gcc "$@" 26 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Copyright 2023 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | version: 2 16 | updates: 17 | - package-ecosystem: github-actions 18 | directory: / 19 | schedule: 20 | interval: daily 21 | groups: 22 | github-actions: 23 | patterns: 24 | - "*" 25 | open-pull-requests-limit: 3 26 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not 4 | # use this file except in compliance with the License. You may obtain a copy of 5 | # the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 12 | # License for the specific language governing permissions and limitations under 13 | # the License. 14 | # ============================================================================== 15 | """ 16 | conftest file for pytest 17 | """ 18 | 19 | 20 | def pytest_sessionfinish(session, exitstatus): 21 | if exitstatus == 5: 22 | session.exitstatus = 0 23 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/version.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.version""" 16 | 17 | from tensorflow_io.python.ops.version_ops import ( # pylint: disable=unused-import 18 | version as VERSION, 19 | ) 20 | -------------------------------------------------------------------------------- /tools/lint/pyupgrade_python.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """pyupgrade""" 16 | import sys 17 | import pyupgrade 18 | from pyupgrade._main import main 19 | 20 | print("pyupgrade: ", sys.argv) 21 | sys.exit(main(sys.argv[1:])) 22 | -------------------------------------------------------------------------------- /tensorflow_io/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io""" 16 | 17 | from tensorflow_io.python.api import * # pylint: disable=wildcard-import 18 | from tensorflow_io.python.api.version import VERSION as __version__ 19 | -------------------------------------------------------------------------------- /tests/test_gcloud/test_gcs.sh: -------------------------------------------------------------------------------- 1 | set -e 2 | set -o pipefail 3 | 4 | if [ "$#" -eq 1 ]; then 5 | container=$1 6 | docker pull python:3.8 7 | docker run -d --rm --net=host --name=$container -v $PWD:/v -w /v python:3.8 bash -x -c 'python3 -m pip install -r /v/tests/test_gcloud/testbench/requirements.txt && gunicorn --bind "0.0.0.0:9099" --worker-class gevent --chdir "/v/tests/test_gcloud/testbench" testbench:application' 8 | echo wait 30 secs until gcs emulator is up and running 9 | sleep 30 10 | exit 0 11 | fi 12 | 13 | export PATH=$(python3 -m site --user-base)/bin:$PATH 14 | 15 | python3 -m pip install -r tests/test_gcloud/testbench/requirements.txt 16 | echo starting gcs-testbench 17 | gunicorn --bind "0.0.0.0:9099" \ 18 | --worker-class gevent \ 19 | --chdir "tests/test_gcloud/testbench" \ 20 | testbench:application & 21 | sleep 30 # Wait for storage emulator to start 22 | echo gcs-testbench started successfully 23 | -------------------------------------------------------------------------------- /third_party/libwebp.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # WebP codec library 3 | 4 | licenses(["notice"]) # WebM license 5 | 6 | exports_files(["COPYING"]) 7 | 8 | cc_library( 9 | name = "libwebp", 10 | srcs = glob([ 11 | "src/dsp/*.c", 12 | "src/dsp/*.h", 13 | "src/utils/*.c", 14 | "src/utils/*.h", 15 | "src/dec/*.c", 16 | "src/dec/*.h", 17 | "src/demux/*.c", 18 | "src/demux/*.h", 19 | "src/enc/*.c", 20 | "src/enc/*.h", 21 | "src/webp/*.h", 22 | ]) + [ 23 | 
"imageio/imageio_util.c", 24 | "imageio/webpdec.c", 25 | "imageio/metadata.c", 26 | "imageio/webpdec.h", 27 | "imageio/metadata.h", 28 | "imageio/imageio_util.h", 29 | "examples/unicode.h", 30 | ], 31 | defines = [ 32 | ], 33 | includes = [ 34 | "src", 35 | ], 36 | visibility = ["//visibility:public"], 37 | ) 38 | -------------------------------------------------------------------------------- /R-package/man/video_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/video_dataset.R 3 | \name{video_dataset} 4 | \alias{video_dataset} 5 | \title{Create a \code{VideoDataset} that reads the video file.} 6 | \usage{ 7 | video_dataset(filenames) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | } 12 | \description{ 13 | This allows a user to read data from a video file with ffmpeg. The output of 14 | VideoDataset is a sequence of (height, weight, 3) tensor in rgb24 format. 15 | } 16 | \examples{ 17 | \dontrun{ 18 | dataset <- video_dataset( 19 | filenames = list("testdata/small.mp4")) \%>\% 20 | dataset_repeat(2) 21 | 22 | sess <- tf$Session() 23 | iterator <- make_iterator_one_shot(dataset) 24 | next_batch <- iterator_get_next(iterator) 25 | 26 | until_out_of_range({ 27 | batch <- sess$run(next_batch) 28 | print(batch) 29 | }) 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/ffmpeg.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.experimental.ffmpeg""" 16 | 17 | from tensorflow_io.python.experimental.ffmpeg_ops import ( # pylint: disable=unused-import 18 | decode_video, 19 | ) 20 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/filesystem.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io.experimental.filesystem""" 16 | 17 | from tensorflow_io.python.experimental.filesystem_ops import ( # pylint: disable=unused-import 18 | set_configuration, 19 | ) 20 | -------------------------------------------------------------------------------- /third_party/zstd.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # Zstandard library 3 | 4 | licenses(["notice"]) # BSD license 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "zstd", 10 | srcs = glob( 11 | [ 12 | "lib/common/*.h", 13 | "lib/common/*.c", 14 | "lib/compress/*.c", 15 | "lib/compress/*.h", 16 | "lib/decompress/*.c", 17 | "lib/decompress/*.h", 18 | ], 19 | exclude = [ 20 | "lib/common/xxhash.c", 21 | ], 22 | ), 23 | hdrs = [ 24 | "lib/zstd.h", 25 | ], 26 | defines = [ 27 | "XXH_PRIVATE_API", 28 | "ZSTDLIB_VISIBILITY=", 29 | "ZSTDERRORLIB_VISIBILITY=", 30 | ], 31 | includes = [ 32 | "lib", 33 | "lib/common", 34 | ], 35 | linkopts = [], 36 | textual_hdrs = [ 37 | "lib/common/xxhash.c", 38 | ], 39 | visibility = ["//visibility:public"], 40 | ) 41 | -------------------------------------------------------------------------------- /third_party/snappy.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # BSD 3-Clause 4 | 5 | exports_files(["COPYING"]) 6 | 7 | cc_library( 8 | name = "snappy", 9 | srcs = glob( 10 | [ 11 | "*.cc", 12 | "*.h", 13 | ], 14 | exclude = [ 15 | "*test.*", 16 | "*fuzzer.*", 17 | ], 18 | ), 19 | hdrs = [ 20 | "snappy-stubs-public.h", 21 | ], 22 | copts = [], 23 | includes = ["."], 24 | ) 25 | 26 | genrule( 27 | name = "snappy_stubs_public_h", 28 | srcs = ["snappy-stubs-public.h.in"], 29 | outs = ["snappy-stubs-public.h"], 30 | cmd = ("sed " + 31 | "-e 
's/$${HAVE_SYS_UIO_H_01}/HAVE_SYS_UIO_H/g' " + 32 | "-e 's/$${PROJECT_VERSION_MAJOR}/1/g' " + 33 | "-e 's/$${PROJECT_VERSION_MINOR}/1/g' " + 34 | "-e 's/$${PROJECT_VERSION_PATCH}/8/g' " + 35 | "$< >$@"), 36 | ) 37 | -------------------------------------------------------------------------------- /tensorflow_io/core/grpc/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load("@com_github_grpc_grpc//bazel:cc_grpc_library.bzl", "cc_grpc_library") 6 | load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_grpc_library", "py_proto_library") 7 | 8 | proto_library( 9 | name = "endpoint_proto", 10 | srcs = ["endpoint.proto"], 11 | deps = ["@com_google_protobuf//:any_proto"], 12 | ) 13 | 14 | cc_proto_library( 15 | name = "endpoint_cc_proto", 16 | deps = [":endpoint_proto"], 17 | ) 18 | 19 | cc_grpc_library( 20 | name = "endpoint_cc_grpc", 21 | srcs = [":endpoint_proto"], 22 | grpc_only = True, 23 | deps = [":endpoint_cc_proto"], 24 | ) 25 | 26 | py_proto_library( 27 | name = "endpoint_py_pb2", 28 | deps = [":endpoint_proto"], 29 | ) 30 | 31 | py_grpc_library( 32 | name = "endpoint_py_pb2_grpc", 33 | srcs = [":endpoint_proto"], 34 | deps = [":endpoint_py_pb2"], 35 | ) 36 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/elasticsearch.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.experimental.elasticsearch""" 16 | 17 | from tensorflow_io.python.experimental.elasticsearch_dataset_ops import ( # pylint: disable=unused-import 18 | ElasticsearchIODataset, 19 | ) 20 | -------------------------------------------------------------------------------- /third_party/libgav1.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # libgav1 decoder for AVIF library 3 | 4 | licenses(["notice"]) # Apache license 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "libgav1", 10 | srcs = glob( 11 | [ 12 | "src/**/*.cc", 13 | "src/**/*.h", 14 | ], 15 | exclude = ["src/**/*_test.cc"], 16 | ), 17 | hdrs = glob([ 18 | "src/**/*.inc", 19 | ]), 20 | defines = [ 21 | "LIBGAV1_MAX_BITDEPTH=8", 22 | "LIBGAV1_PUBLIC=", 23 | ], 24 | includes = [ 25 | "src", 26 | ], 27 | visibility = ["//visibility:public"], 28 | deps = [ 29 | "@com_google_absl//absl/algorithm", 30 | "@com_google_absl//absl/container:inlined_vector", 31 | "@com_google_absl//absl/strings:str_format", 32 | "@com_google_absl//absl/synchronization", 33 | "@com_google_absl//absl/types:optional", 34 | "@com_google_absl//absl/types:span", 35 | ], 36 | ) 37 | -------------------------------------------------------------------------------- /tools/build/tensorflow_io.bzl: -------------------------------------------------------------------------------- 1 | 
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") 2 | 3 | def tf_io_copts(): 4 | return ( 5 | select({ 6 | "@bazel_tools//src/conditions:windows": [ 7 | "/DEIGEN_STRONG_INLINE=inline", 8 | "-DTENSORFLOW_MONOLITHIC_BUILD", 9 | "/DPLATFORM_WINDOWS", 10 | "/DEIGEN_HAS_C99_MATH", 11 | "/DTENSORFLOW_USE_EIGEN_THREADPOOL", 12 | "/DEIGEN_AVOID_STL_ARRAY", 13 | "/Iexternal/gemmlowp", 14 | "/wd4018", 15 | "/wd4577", 16 | "/DNOGDI", 17 | "/UTF_COMPILE_LIBRARY", 18 | "/DNDEBUG", 19 | ], 20 | "@bazel_tools//src/conditions:darwin": [ 21 | "-DNDEBUG", 22 | ], 23 | "//conditions:default": [ 24 | "-DNDEBUG", 25 | "-pthread", 26 | ], 27 | }) 28 | ) 29 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/filter.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io.experimental.filter""" 16 | 17 | from tensorflow_io.python.experimental.filter_ops import ( # pylint: disable=unused-import 18 | gaussian, 19 | laplacian, 20 | prewitt, 21 | sobel, 22 | gabor, 23 | ) 24 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/text.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.experimental.text""" 16 | 17 | from tensorflow_io.python.experimental.text_ops import ( # pylint: disable=unused-import 18 | decode_libsvm, 19 | re2_full_match, 20 | read_text, 21 | TextOutputSequence, 22 | ) 23 | -------------------------------------------------------------------------------- /third_party/toolchains/gcc7_manylinux2010/dummy_toolchain.bzl: -------------------------------------------------------------------------------- 1 | # pylint: disable=g-bad-file-header 2 | # Copyright 2017 The Bazel Authors. All rights reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """Skylark rule that stubs a toolchain.""" 17 | 18 | def _dummy_toolchain_impl(ctx): 19 | ctx = ctx # unused argument 20 | toolchain = platform_common.ToolchainInfo() 21 | return [toolchain] 22 | 23 | dummy_toolchain = rule(_dummy_toolchain_impl, attrs = {}) 24 | -------------------------------------------------------------------------------- /tools/docker/README.md: -------------------------------------------------------------------------------- 1 | # Tensorflow-IO Dockerfiles 2 | 3 | This directory maintains the Dockerfiles needed to build the tensorflow-io images. 4 | 5 | ## Building 6 | 7 | To build a `tensorflow-io` image with CPU support: 8 | 9 | ```bash 10 | $ docker build -f ./cpu.Dockerfile -t tfio-cpu . 11 | ``` 12 | 13 | **NOTE:** Each `.Dockerfile` has its own set of available `--build-arg`s which are documented 14 | in the file itself. 15 | 16 | ## Running Locally Built Images 17 | 18 | **Note for new Docker users:** the `-v` and `-u` flags share directories and 19 | permissions between the Docker container and your machine. Without `-v`, your 20 | work will be wiped once the container quits, and without `-u`, files created by 21 | the container will have the wrong file permissions on your host machine. Check 22 | out the 23 | [Docker run documentation](https://docs.docker.com/engine/reference/run/) for 24 | more info. 25 | 26 | ```sh 27 | # Mount $PWD into the container and make it as the current working directory. 
28 | $ docker run -it --rm -v ${PWD}:/v -w /v tfio-cpu 29 | ``` -------------------------------------------------------------------------------- /R-package/man/sequence_file_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/hadoop_dataset.R 3 | \name{sequence_file_dataset} 4 | \alias{sequence_file_dataset} 5 | \title{Create a \code{SequenceFileDataset}.} 6 | \usage{ 7 | sequence_file_dataset(filenames) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor containing one or more filenames.} 11 | } 12 | \description{ 13 | This function allows a user to read data from a hadoop sequence 14 | file. A sequence file consists of (key value) pairs sequentially. At 15 | the moment, \code{org.apache.hadoop.io.Text} is the only serialization type 16 | being supported, and there is no compression support. 17 | } 18 | \examples{ 19 | \dontrun{ 20 | dataset <- sequence_file_dataset("testdata/string.seq") \%>\% 21 | dataset_repeat(1) 22 | 23 | sess <- tf$Session() 24 | iterator <- make_iterator_one_shot(dataset) 25 | next_batch <- iterator_get_next(iterator) 26 | 27 | until_out_of_range({ 28 | batch <- sess$run(next_batch) 29 | print(batch) 30 | }) 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /R-package/man/kinesis_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/kinesis_dataset.R 3 | \name{kinesis_dataset} 4 | \alias{kinesis_dataset} 5 | \title{Creates a \code{KinesisDataset}.} 6 | \usage{ 7 | kinesis_dataset(stream, shard = "", read_indefinitely = TRUE, interval = 1e+05) 8 | } 9 | \arguments{ 10 | \item{stream}{A \code{tf.string} tensor containing the name of the stream.} 11 | 12 | \item{shard}{A \code{tf.string} tensor containing the id of the 
shard.} 13 | 14 | \item{read_indefinitely}{If \code{True}, the Kinesis dataset will keep retry again 15 | on \code{EOF} after the \code{interval} period. If \code{False}, then the dataset will 16 | stop on \code{EOF}. The default value is \code{True}.} 17 | 18 | \item{interval}{The interval for the Kinesis Client to wait before it tries 19 | to get records again (in millisecond).} 20 | } 21 | \description{ 22 | Kinesis is a managed service provided by AWS for data streaming. 23 | This dataset reads messages from Kinesis with each message presented 24 | as a \code{tf.string}. 25 | } 26 | -------------------------------------------------------------------------------- /third_party/ogg.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # BSD license 4 | 5 | cc_library( 6 | name = "ogg", 7 | srcs = glob([ 8 | "src/*.h", 9 | "src/*.c", 10 | ]), 11 | hdrs = glob([ 12 | "include/ogg/*.h", 13 | ]) + [ 14 | "include/ogg/config_types.h", 15 | ], 16 | copts = [], 17 | includes = [ 18 | "include", 19 | ], 20 | ) 21 | 22 | genrule( 23 | name = "config_types_h", 24 | srcs = ["include/ogg/config_types.h.in"], 25 | outs = ["include/ogg/config_types.h"], 26 | cmd = ("sed " + 27 | "-e 's/@INCLUDE_INTTYPES_H@/1/g' " + 28 | "-e 's/@INCLUDE_STDINT_H@/1/g' " + 29 | "-e 's/@INCLUDE_SYS_TYPES_H@/1/g' " + 30 | "-e 's/@SIZE16@/int16_t/g' " + 31 | "-e 's/@USIZE16@/uint16_t/g' " + 32 | "-e 's/@SIZE32@/int32_t/g' " + 33 | "-e 's/@USIZE32@/uint32_t/g' " + 34 | "-e 's/@SIZE64@/int64_t/g' " + 35 | "-e 's/@USIZE64@/uint64_t/g' " + 36 | "$< >$@"), 37 | ) 38 | -------------------------------------------------------------------------------- /configure.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | rm -f .bazelrc 18 | 19 | PYTHON=python3 20 | if [[ "$#" -gt 0 ]]; then 21 | PYTHON=$1 22 | fi 23 | 24 | if $PYTHON -c "import tensorflow as tf" &> /dev/null; then 25 | echo 'using installed tensorflow' 26 | else 27 | $PYTHON -m pip install $($PYTHON setup.py --install-require) 28 | fi 29 | $PYTHON tools/build/configure.py 30 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/mongodb.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io.experimental.mongodb""" 16 | 17 | from tensorflow_io.python.experimental.mongodb_dataset_ops import ( # pylint: disable=unused-import 18 | MongoDBIODataset, 19 | ) 20 | 21 | from tensorflow_io.python.experimental.mongodb_writer_ops import ( # pylint: disable=unused-import 22 | MongoDBWriter, 23 | ) 24 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/serialization.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io.experimental.serialization""" 16 | 17 | from tensorflow_io.python.experimental.serialization_ops import ( # pylint: disable=unused-import 18 | decode_json, 19 | decode_avro, 20 | encode_avro, 21 | ) 22 | 23 | from tensorflow_io.python.experimental.serial_ops import save_dataset, load_dataset 24 | -------------------------------------------------------------------------------- /R-package/man/from_schema.arrow_stream_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arrow_dataset.R 3 | \name{from_schema.arrow_stream_dataset} 4 | \alias{from_schema.arrow_stream_dataset} 5 | \title{Create an Arrow Dataset from an input stream, inferring output types and 6 | shapes from the given Arrow schema.} 7 | \usage{ 8 | \method{from_schema}{arrow_stream_dataset}(object, schema, columns = NULL, host = NULL, filenames = NULL, ...) 9 | } 10 | \arguments{ 11 | \item{object}{An \R object.} 12 | 13 | \item{schema}{Arrow schema defining the record batch data in the stream.} 14 | 15 | \item{columns}{A list of column indices to be used in the Dataset.} 16 | 17 | \item{host}{A \code{tf.string} tensor or string defining the input stream. 18 | For a socket client, use ":", for stdin use "STDIN".} 19 | 20 | \item{filenames}{Not used.} 21 | 22 | \item{...}{Optional arguments passed on to implementing methods.} 23 | } 24 | \description{ 25 | Create an Arrow Dataset from an input stream, inferring output types and 26 | shapes from the given Arrow schema. 27 | } 28 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/image.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.image""" 16 | 17 | from tensorflow_io.python.ops.image_ops import ( # pylint: disable=unused-import 18 | decode_webp, 19 | encode_bmp, 20 | encode_gif, 21 | ) 22 | from tensorflow_io.python.ops.dicom_ops import ( # pylint: disable=unused-import 23 | decode_dicom_data, 24 | decode_dicom_image, 25 | dicom_tags, 26 | ) 27 | -------------------------------------------------------------------------------- /tests/test_aws/aws_test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # ============================================================================== 16 | 17 | set -e 18 | set -o pipefail 19 | 20 | LOCALSTACK_VERSION=0.12.10 21 | docker pull localstack/localstack:$LOCALSTACK_VERSION 22 | docker run -d --rm --net=host --name=tensorflow-io-aws localstack/localstack:$LOCALSTACK_VERSION 23 | echo "Waiting for 10 secs until localstack is up and running" 24 | sleep 10 25 | echo "Localstack up" 26 | exit 0 27 | -------------------------------------------------------------------------------- /tests/test_version.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not 4 | # use this file except in compliance with the License. You may obtain a copy of 5 | # the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations under 13 | # the License. 
14 | # ============================================================================== 15 | """Tests for version string""" 16 | 17 | 18 | import tensorflow_io as tfio 19 | from tensorflow_io.python.ops import version_ops 20 | 21 | 22 | def test_version(): 23 | """test_version""" 24 | assert tfio.__version__ == version_ops.version 25 | assert tfio.version.VERSION == version_ops.version 26 | 27 | 28 | if __name__ == "__main__": 29 | test.main() 30 | -------------------------------------------------------------------------------- /R-package/man/from_schema.arrow_feather_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arrow_dataset.R 3 | \name{from_schema.arrow_feather_dataset} 4 | \alias{from_schema.arrow_feather_dataset} 5 | \title{Create an Arrow Dataset for reading record batches from Arrow feather files, 6 | inferring output types and shapes from the given Arrow schema.} 7 | \usage{ 8 | \method{from_schema}{arrow_feather_dataset}(object, schema, columns = NULL, host = NULL, filenames = NULL, ...) 9 | } 10 | \arguments{ 11 | \item{object}{An \R object.} 12 | 13 | \item{schema}{Arrow schema defining the record batch data in the stream.} 14 | 15 | \item{columns}{A list of column indices to be used in the Dataset.} 16 | 17 | \item{host}{Not used.} 18 | 19 | \item{filenames}{A \code{tf.string} tensor, list or scalar containing files in 20 | Arrow Feather format.} 21 | 22 | \item{...}{Optional arguments passed on to implementing methods.} 23 | } 24 | \description{ 25 | Create an Arrow Dataset for reading record batches from Arrow feather files, 26 | inferring output types and shapes from the given Arrow schema. 
27 | } 28 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/genome.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Genomics related ops for Tensorflow. 16 | 17 | This package provides ops for reading common genomics file formats and 18 | performing common genomics IO-related operations. 
19 | """ 20 | 21 | from tensorflow_io.python.ops.genome_ops import ( # pylint: disable=unused-import 22 | read_fastq, 23 | sequences_to_onehot, 24 | phred_sequences_to_probability, 25 | ) 26 | -------------------------------------------------------------------------------- /third_party/aws-c-common.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # AWS C Common 3 | 4 | package(default_visibility = ["//visibility:public"]) 5 | 6 | licenses(["notice"]) # Apache 2.0 7 | 8 | exports_files(["LICENSE"]) 9 | 10 | cc_library( 11 | name = "aws-c-common", 12 | srcs = glob([ 13 | "include/aws/common/*.h", 14 | "include/aws/common/private/*.h", 15 | "source/*.c", 16 | ]) + select({ 17 | "@bazel_tools//src/conditions:windows": glob([ 18 | "source/windows/*.c", 19 | ]), 20 | "//conditions:default": glob([ 21 | "source/posix/*.c", 22 | ]), 23 | }), 24 | hdrs = [ 25 | "include/aws/common/config.h", 26 | ], 27 | defines = [], 28 | includes = [ 29 | "include", 30 | ], 31 | textual_hdrs = glob([ 32 | "include/**/*.inl", 33 | ]), 34 | deps = [], 35 | ) 36 | 37 | genrule( 38 | name = "config_h", 39 | srcs = [ 40 | "include/aws/common/config.h.in", 41 | ], 42 | outs = [ 43 | "include/aws/common/config.h", 44 | ], 45 | cmd = "sed 's/cmakedefine/undef/g' $< > $@", 46 | ) 47 | -------------------------------------------------------------------------------- /third_party/toolchains/gpu/cuda/cuda_config.h.tpl: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef CUDA_CUDA_CONFIG_H_ 17 | #define CUDA_CUDA_CONFIG_H_ 18 | 19 | #define TF_CUDA_CAPABILITIES %{cuda_compute_capabilities} 20 | 21 | #define TF_CUDA_VERSION "%{cuda_version}" 22 | #define TF_CUDA_LIB_VERSION "%{cuda_lib_version}" 23 | #define TF_CUDNN_VERSION "%{cudnn_version}" 24 | 25 | #define TF_CUDA_TOOLKIT_PATH "%{cuda_toolkit_path}" 26 | 27 | #endif // CUDA_CUDA_CONFIG_H_ -------------------------------------------------------------------------------- /tools/docker/cpu.Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | # Python version for the base image 17 | ARG PYTHON_VERSION=3.7-slim 18 | 19 | FROM python:${PYTHON_VERSION} 20 | 21 | # tfio package name and version for pip install 22 | ARG TFIO_PACKAGE=tensorflow-io 23 | ARG TFIO_PACKAGE_VERSION= 24 | ARG TENSORFLOW_VARIANT=tensorflow 25 | 26 | RUN pip install ${TFIO_PACKAGE}${TFIO_PACKAGE_VERSION:+==${TFIO_PACKAGE_VERSION}}[${TENSORFLOW_VARIANT}] 27 | -------------------------------------------------------------------------------- /.github/workflows/release.note.yml: -------------------------------------------------------------------------------- 1 | name: Release Note 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | release-note: 13 | permissions: 14 | contents: write # for peter-evans/create-pull-request to create branch 15 | pull-requests: write # for peter-evans/create-pull-request to create a PR 16 | name: README.md 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 20 | with: 21 | fetch-depth: 0 22 | - run: git tag 23 | - run: python3 tools/release/note_create.py 24 | - run: git diff 25 | - run: python3 tools/release/note_update.py 26 | - run: git diff 27 | - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 28 | with: 29 | commit-message: Update RELEASE.md [bot] 30 | branch: bot-RELEASE.md 31 | delete-branch: true 32 | title: 'Update RELEASE.md [bot]' 33 | body: | 34 | README.md: auto-updated by .github/workflows/release.note.yml 35 | -------------------------------------------------------------------------------- /third_party/boost.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # Boost C++ Library 3 | 4 | package(default_visibility = ["//visibility:public"]) 5 | 6 | licenses(["notice"]) # Boost 
Software License 7 | 8 | exports_files(["LICENSE_1_0.txt"]) 9 | 10 | cc_library( 11 | name = "boost", 12 | srcs = glob([ 13 | "boost/**/*.hpp", 14 | "boost/predef/**/*.h", 15 | "boost/detail/**/*.ipp", 16 | "boost/asio/**/*.ipp", 17 | "boost/date_time/**/*.ipp", 18 | "boost/xpressive/detail/**/*.ipp", 19 | ]) + glob([ 20 | "libs/filesystem/src/*.cpp", 21 | "libs/iostreams/src/*.cpp", 22 | "libs/regex/src/*.cpp", 23 | "libs/system/src/*.cpp", 24 | ]) + [ 25 | "boost/predef.h", 26 | "libs/filesystem/src/error_handling.hpp", 27 | "libs/regex/src/internals.hpp", 28 | ], 29 | defines = [ 30 | "BOOST_ALL_NO_LIB=1", 31 | "_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION=1", 32 | ], 33 | includes = [ 34 | ".", 35 | ], 36 | deps = [ 37 | "@bzip2", 38 | "@xz//:lzma", 39 | "@zlib", 40 | "@zstd", 41 | ], 42 | ) 43 | -------------------------------------------------------------------------------- /tools/docker/nightly.Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | # Python version for the base image 17 | ARG PYTHON_VERSION=3.7-slim 18 | 19 | FROM python:${PYTHON_VERSION} 20 | 21 | # tfio package name and version for pip install 22 | ARG TFIO_PACKAGE=tensorflow-io-nightly 23 | ARG TFIO_PACKAGE_VERSION= 24 | ARG TENSORFLOW_VARIANT=tensorflow 25 | 26 | RUN pip install ${TFIO_PACKAGE}${TFIO_PACKAGE_VERSION:+==${TFIO_PACKAGE_VERSION}}[${TENSORFLOW_VARIANT}] 27 | -------------------------------------------------------------------------------- /tests/test_atds_avro/utils/hash_util.py: -------------------------------------------------------------------------------- 1 | # Copyright 2023 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Utility functions for hashing""" 16 | 17 | 18 | def int_to_bytes(x): 19 | # Add one bit because a signed N-bit int can only represent up to 2^(N-1) - 1 20 | # (instead of an unsigned N-bit int which can represent up to 2^N - 1). 21 | # For example, 128 requires 9 bits (therefore two bytes) in twos complement. 
22 | return x.to_bytes(x.bit_length() // 8 + 1, byteorder="little", signed=True) 23 | -------------------------------------------------------------------------------- /tensorflow_io/core/ops/bigquery_test_ops.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow/core/framework/common_shape_fns.h" 17 | #include "tensorflow/core/framework/op.h" 18 | 19 | namespace tensorflow { 20 | 21 | REGISTER_OP("IO>BigQueryTestClient") 22 | .Attr("fake_server_address: string") 23 | .Attr("container: string = ''") 24 | .Attr("shared_name: string = ''") 25 | .Output("client: resource") 26 | .SetShapeFn(shape_inference::ScalarShape); 27 | 28 | } // namespace tensorflow 29 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/bigtable.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.bigtable""" 16 | 17 | from tensorflow_io.python.ops.bigtable.bigtable_dataset_ops import BigtableClient 18 | from tensorflow_io.python.ops.bigtable.bigtable_dataset_ops import BigtableTable 19 | import tensorflow_io.python.ops.bigtable.bigtable_version_filters as filters 20 | import tensorflow_io.python.ops.bigtable.bigtable_row_set as row_set 21 | import tensorflow_io.python.ops.bigtable.bigtable_row_range as row_range 22 | -------------------------------------------------------------------------------- /tensorflow_io/python/ops/golang_ops.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """Dataset.""" 16 | 17 | from tensorflow_io.python.ops import _load_library 18 | 19 | _golang_ops = _load_library("libtensorflow_io_golang.so") 20 | 21 | io_prometheus_readable_init = _golang_ops.io_prometheus_readable_init 22 | io_prometheus_readable_spec = _golang_ops.io_prometheus_readable_spec 23 | io_prometheus_readable_read = _golang_ops.io_prometheus_readable_read 24 | io_prometheus_scrape = _golang_ops.io_prometheus_scrape 25 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/image.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io.experimental.image""" 16 | 17 | from tensorflow_io.python.experimental.image_ops import ( # pylint: disable=unused-import 18 | draw_bounding_boxes, 19 | decode_jpeg_exif, 20 | decode_tiff_info, 21 | decode_tiff, 22 | decode_exr_info, 23 | decode_exr, 24 | decode_pnm, 25 | decode_hdr, 26 | decode_nv12, 27 | decode_yuy2, 28 | decode_avif, 29 | decode_jp2, 30 | decode_obj, 31 | ) 32 | -------------------------------------------------------------------------------- /tests/test_atds_avro/utils/generator/sparse_util.py: -------------------------------------------------------------------------------- 1 | # Copyright 2023 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | 17 | def coord_to_int(coord, shape): 18 | """Convert a location in a tensor to its unique index, in row-major order. 19 | For example, in the 2d tensor 20 | [[0, 1, 2] 21 | [3, 4, 5]] 22 | The location [1, 0] (i.e. the entry in the second row, first column) will return 3. 
23 | """ 24 | ret = 0 25 | rank = len(shape) 26 | for dim in range(rank): 27 | ret = ret * shape[dim] + coord[dim] 28 | return ret 29 | -------------------------------------------------------------------------------- /tools/docker/tests/dockerfile_cpu_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | IMAGE_NAME="tfsigio/tfio" 18 | IMAGE_TAG="latest" 19 | export PYTHON_BIN_PATH=$(which python3) 20 | 21 | echo "Build the docker image ..." 22 | docker build -f tools/docker/cpu.Dockerfile -t ${IMAGE_NAME}:${IMAGE_TAG} . 23 | 24 | echo "Starting the docker container from image: ${IMAGE_NAME}:${IMAGE_TAG} and validating import ..." 25 | docker run -t --rm ${IMAGE_NAME}:${IMAGE_TAG} python -c "import tensorflow_io as tfio; print(tfio.__version__)" -------------------------------------------------------------------------------- /tools/docker/tests/dockerfile_nightly_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | IMAGE_NAME="tfsigio/tfio" 18 | IMAGE_TAG="nightly" 19 | export PYTHON_BIN_PATH=$(which python3) 20 | 21 | echo "Build the docker image ..." 22 | docker build -f tools/docker/nightly.Dockerfile -t ${IMAGE_NAME}:${IMAGE_TAG} . 23 | 24 | echo "Starting the docker container from image: ${IMAGE_NAME}:${IMAGE_TAG} and validating import ..." 25 | docker run -t --rm ${IMAGE_NAME}:${IMAGE_TAG} python -c "import tensorflow_io as tfio; print(tfio.__version__)" -------------------------------------------------------------------------------- /tools/docker/tests/dockerfile_devel_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | IMAGE_NAME="tfsigio/tfio" 18 | IMAGE_TAG="latest-devel" 19 | export PYTHON_BIN_PATH=$(which python3) 20 | 21 | echo "Build the docker image ..." 22 | docker build -f tools/docker/devel.Dockerfile -t ${IMAGE_NAME}:${IMAGE_TAG} . 23 | 24 | echo "Starting the docker container from image: ${IMAGE_NAME}:${IMAGE_TAG} and building the package ..." 25 | docker run -t --rm --net=host -v ${PWD}:/v -w /v ${IMAGE_NAME}:${IMAGE_TAG} bash tools/docker/tests/bazel_build.sh -------------------------------------------------------------------------------- /R-package/pkgdown/_pkgdown.yml: -------------------------------------------------------------------------------- 1 | template: 2 | params: 3 | bootswatch: flatly 4 | 5 | navbar: 6 | title: "tfio" 7 | type: inverse 8 | left: 9 | - text: "Home" 10 | href: index.html 11 | - text: "Tutorials" 12 | menu: 13 | - text: "TensorFlow IO Basics" 14 | href: articles/introduction.html 15 | - text: "Reference" 16 | href: reference/index.html 17 | right: 18 | - icon: fa-github 19 | href: https://github.com/tensorflow/io 20 | 21 | reference: 22 | 23 | - title: "Dataset Builders" 24 | contents: 25 | - ignite_dataset 26 | - kafka_dataset 27 | - kinesis_dataset 28 | - lmdb_dataset 29 | - mnist_image_dataset 30 | - mnist_label_dataset 31 | - parquet_dataset 32 | - pubsub_dataset 33 | - sequence_file_dataset 34 | - tiff_dataset 35 | - video_dataset 36 | - webp_dataset 37 | - arrow_feather_dataset 38 | - arrow_stream_dataset 39 | 40 | - title: "Utility Methods" 41 | contents: 42 | - from_schema 43 | - from_schema.arrow_feather_dataset 44 | - from_schema.arrow_stream_dataset 45 | - make_libsvm_dataset 46 | 47 | - title: "Re-exported Methods" 48 | contents: 49 | - reexports 50 | 
-------------------------------------------------------------------------------- /third_party/libgeotiff.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # GeoTIFF library 3 | 4 | licenses(["notice"]) # Public/BSD-like licence 5 | 6 | exports_files(["LICENSE"]) 7 | 8 | cc_library( 9 | name = "libgeotiff", 10 | srcs = glob([ 11 | "libxtiff/*.c", 12 | "*.c", 13 | "*.inc", 14 | ]), 15 | hdrs = glob([ 16 | "libxtiff/*.h", 17 | "*.h", 18 | ]) + [ 19 | "geo_config.h", 20 | ], 21 | defines = [], 22 | includes = [ 23 | "libxtiff", 24 | ], 25 | linkopts = [], 26 | visibility = ["//visibility:public"], 27 | deps = [ 28 | "@libtiff", 29 | "@proj", 30 | ], 31 | ) 32 | 33 | genrule( 34 | name = "geo_config_h", 35 | outs = ["geo_config.h"], 36 | cmd = "\n".join([ 37 | "cat <<'EOF' >$@", 38 | "#ifndef GEO_CONFIG_H", 39 | "#define GEO_CONFIG_H", 40 | "#define STDC_HEADERS 1", 41 | "#define HAVE_STDLIB_H 1", 42 | "#define HAVE_STRING_H 1", 43 | "#define HAVE_STRINGS_H 1", 44 | "#define HAVE_LIBPROJ 1", 45 | "#define HAVE_PROJECTS_H 1", 46 | "/* #undef GEO_NORMALIZE_DISABLE_TOWGS84 */", 47 | "#endif /* ndef GEO_CONFIG_H */", 48 | "EOF", 49 | ]), 50 | ) 51 | -------------------------------------------------------------------------------- /tensorflow_io/core/kernels/gsmemcachedfs/gce_memcached_server_list_provider.h: -------------------------------------------------------------------------------- 1 | #ifndef TENSORFLOW_IO_GSMEMCACHEDFS_GCE_MEMCACHED_SERVER_LIST_PROVIDER_H_ // NOLINT 2 | #define TENSORFLOW_IO_GSMEMCACHEDFS_GCE_MEMCACHED_SERVER_LIST_PROVIDER_H_ // NOLINT 3 | 4 | #include 5 | #include 6 | #include 7 | 8 | #include "absl/memory/memory.h" 9 | #include "tensorflow/core/lib/core/status.h" 10 | #include "tensorflow/tsl/platform/cloud/compute_engine_metadata_client.h" 11 | 12 | namespace tensorflow { 13 | 14 | class GceMemcachedServerListProvider { 15 | public: 16 | explicit GceMemcachedServerListProvider( 17 | 
def decode_video(content, index=0, name=None):
    """Decode the video stream contained in an encoded media file.

    Args:
      content: A `Tensor` of type `string` holding the encoded file contents.
      index: The stream index to decode (defaults to the first stream).
      name: An optional name for the operation.

    Returns:
      value: A `uint8` Tensor with the decoded video frames.
    """
    # Imported lazily so the ffmpeg shared objects are only loaded when
    # video decoding is actually requested.
    from tensorflow_io.python.ops import (  # pylint: disable=import-outside-toplevel
        ffmpeg_ops,
    )

    decoder = ffmpeg_ops.io_ffmpeg_decode_video
    return decoder(content, index, name=name)
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | echo "Configuring the environment for build ..." 18 | ./configure.sh 19 | 20 | echo "Cleaning up existing bazel build files ..." 21 | rm -rf bazel-* 22 | 23 | echo "Building the tensorflow-io package ..." 24 | bazel build -j 8 --copt=-msse4.2 --copt=-mavx --compilation_mode=opt --verbose_failures --test_output=errors --crosstool_top=//third_party/toolchains/gcc7_manylinux2010:toolchain //tensorflow_io/... 25 | 26 | echo "Validating import ..." 
27 | python -c "import tensorflow_io as tfio; print(tfio.__version__)" -------------------------------------------------------------------------------- /tools/lint/BUILD: -------------------------------------------------------------------------------- 1 | load("defs.bzl", "lint") 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | lint( 6 | name = "lint", 7 | mode = "lint", 8 | ) 9 | 10 | lint( 11 | name = "check", 12 | mode = "check", 13 | ) 14 | 15 | exports_files(["lint.tpl"]) 16 | 17 | py_binary( 18 | name = "pyupgrade_py", 19 | srcs = ["pyupgrade_python.py"], 20 | main = "pyupgrade_python.py", 21 | deps = [ 22 | ], 23 | ) 24 | 25 | genrule( 26 | name = "pyupgrade", 27 | srcs = [], 28 | outs = ["pyupgrade"], 29 | cmd = "echo '$(location :pyupgrade_py) \"$$@\"' > $@", 30 | executable = True, 31 | tools = [ 32 | ":pyupgrade_py", 33 | ], 34 | ) 35 | 36 | py_binary( 37 | name = "black_py", 38 | srcs = ["black_python.py"], 39 | main = "black_python.py", 40 | deps = [ 41 | ], 42 | ) 43 | 44 | genrule( 45 | name = "black", 46 | srcs = [], 47 | outs = ["black"], 48 | cmd = "echo '$(location :black_py) \"$$@\"' > $@", 49 | executable = True, 50 | tools = [ 51 | ":black_py", 52 | ], 53 | ) 54 | 55 | genrule( 56 | name = "clang_format", 57 | srcs = ["@llvm_toolchain//:bin/clang-format"], 58 | outs = ["clang-format"], 59 | cmd = "cat $< > $@", 60 | executable = True, 61 | ) 62 | -------------------------------------------------------------------------------- /tensorflow_io/core/ops/filesystem_ops.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow/core/framework/common_shape_fns.h" 17 | #include "tensorflow/core/framework/op.h" 18 | #include "tensorflow/core/framework/shape_inference.h" 19 | 20 | namespace tensorflow { 21 | namespace io { 22 | namespace { 23 | 24 | REGISTER_OP("IO>FileSystemSetConfiguration") 25 | .Input("scheme: string") 26 | .Input("key: string") 27 | .Input("value: string") 28 | .SetIsStateful() 29 | .SetShapeFn(shape_inference::ScalarShape); 30 | 31 | } // namespace 32 | } // namespace io 33 | } // namespace tensorflow 34 | -------------------------------------------------------------------------------- /tensorflow_io/core/kernels/avro/utils/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | load("@io_bazel_rules_go//go:def.bzl", "go_binary") 10 | 11 | cc_library( 12 | name = "avro_utils_api", 13 | hdrs = [ 14 | "avro_parser.h", 15 | "avro_parser_tree.h", 16 | "avro_record_reader.h", 17 | "name_utils.h", # TODO(fraudies): delete when tensorflow/core/kernels/data/name_utils.h visible 18 | "parse_avro_attrs.h", 19 | "prefix_tree.h", 20 | "value_buffer.h", 21 | ], 22 | linkstatic = True, 23 | deps = [ 24 | "//tensorflow_io/core:dataset_ops", 25 | "@com_googlesource_code_re2//:re2", 26 | ], 27 | ) 28 | 29 | cc_library( 30 | name = 
"avro_utils", 31 | srcs = [ 32 | "avro_parser.cc", 33 | "avro_parser_tree.cc", 34 | "avro_record_reader.cc", 35 | "name_utils.cc", # TODO(fraudies): delete when tensorflow/core/kernels/data/name_utils.h visible 36 | "parse_avro_attrs.cc", 37 | "prefix_tree.cc", 38 | "value_buffer.cc", 39 | ], 40 | linkstatic = True, 41 | deps = [ 42 | ":avro_utils_api", 43 | "@avro", 44 | ], 45 | ) 46 | -------------------------------------------------------------------------------- /tests/test_filesystem.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not 4 | # use this file except in compliance with the License. You may obtain a copy of 5 | # the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations under 13 | # the License. 
def test_filesystem_configuration():
    """Setting a configuration on the 'gs' scheme should surface the
    UnimplementedError raised by the underlying filesystem plugin."""
    expected = (
        "SetConfiguration not implemented for gcs ('gs://') file system: "
        "name = 123, value = 456"
    )
    with pytest.raises(tf.errors.UnimplementedError) as excinfo:
        tfio.experimental.filesystem.set_configuration("gs", "123", "456")
    assert expected in str(excinfo.value)
21 | } 22 | \examples{ 23 | \dontrun{ 24 | dtypes <- tf$python$framework$dtypes 25 | output_types <- reticulate::tuple( 26 | dtypes$bool, dtypes$int32, dtypes$int64, dtypes$float32, dtypes$float64) 27 | dataset <- parquet_dataset( 28 | filenames = list("testdata/parquet_cpp_example.parquet"), 29 | columns = list(0, 1, 2, 4, 5), 30 | output_types = output_types) \%>\% 31 | dataset_repeat(2) 32 | 33 | sess <- tf$Session() 34 | iterator <- make_iterator_one_shot(dataset) 35 | next_batch <- iterator_get_next(iterator) 36 | 37 | until_out_of_range({ 38 | batch <- sess$run(next_batch) 39 | print(batch) 40 | }) 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /tensorflow_io/core/kernels/avro/utils/parse_avro_attrs.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | #include "tensorflow/core/framework/types.h" 16 | #include "tensorflow/core/lib/core/errors.h" 17 | #include "tensorflow/core/platform/types.h" 18 | 19 | namespace tensorflow { 20 | namespace data { 21 | 22 | // Checks for valid type for the avro attributes; currently we support bool, 23 | // int, long, float, double, string. 24 | // 25 | // 'dtype' The data type. 
26 | // 27 | // returns OK if any of the supported types; otherwise false. 28 | // 29 | tensorflow::Status CheckValidType(const tensorflow::DataType& dtype); 30 | 31 | } // namespace data 32 | } // namespace tensorflow 33 | -------------------------------------------------------------------------------- /tests/test_image/GeogToWGS84GeoKey5.lgo: -------------------------------------------------------------------------------- 1 | Geotiff_Information: 2 | Version: 1 3 | Key_Revision: 1.0 4 | Tagged_Information: 5 | ModelTiepointTag (2,3): 6 | 50.5 50.5 0 7 | 9.00105738 52.001376 0 8 | ModelPixelScaleTag (1,3): 9 | 2.77777778e-005 2.77777778e-005 1 10 | End_Of_Tags. 11 | Keyed_Information: 12 | GTModelTypeGeoKey (Short,1): ModelTypeGeographic 13 | GTRasterTypeGeoKey (Short,1): RasterPixelIsArea 14 | GeographicTypeGeoKey (Short,1): User-Defined 15 | GeogGeodeticDatumGeoKey (Short,1): User-Defined 16 | GeogAngularUnitsGeoKey (Short,1): Angular_Degree 17 | GeogEllipsoidGeoKey (Short,1): Ellipse_Bessel_1841 18 | Unknown-2062 (Double,7): 598.1 73.7 418.2 19 | 0.202 0.045 -2.455 20 | 6.7 21 | End_Of_Keys. 22 | End_Of_Geotiff. 23 | 24 | Ellipsoid: 7004/(unknown) (0.00,0.00) 25 | 26 | Corner Coordinates: 27 | Upper Left ( 8d59'58.76"E, 52d 0'10.00"N) 28 | Lower Left ( 8d59'58.76"E, 51d59'59.90"N) 29 | Upper Right ( 9d 0' 8.86"E, 52d 0'10.00"N) 30 | Lower Right ( 9d 0' 8.86"E, 51d59'59.90"N) 31 | Center ( 9d 0' 3.81"E, 52d 0' 4.95"N) 32 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/audio.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """tensorflow_io.audio""" 16 | 17 | from tensorflow_io.python.ops.audio_ops import ( # pylint: disable=unused-import 18 | spectrogram, 19 | inverse_spectrogram, 20 | melscale, 21 | dbscale, 22 | remix, 23 | split, 24 | trim, 25 | freq_mask, 26 | time_mask, 27 | fade, 28 | resample, 29 | decode_wav, 30 | encode_wav, 31 | decode_flac, 32 | encode_flac, 33 | decode_vorbis, 34 | encode_vorbis, 35 | decode_mp3, 36 | encode_mp3, 37 | decode_aac, 38 | encode_aac, 39 | AudioIOTensor, 40 | AudioIODataset, 41 | ) 42 | -------------------------------------------------------------------------------- /tensorflow_io/core/ops/genome_ops.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow/core/framework/common_shape_fns.h" 17 | #include "tensorflow/core/framework/op.h" 18 | #include "tensorflow/core/framework/shape_inference.h" 19 | 20 | namespace tensorflow { 21 | 22 | REGISTER_OP("IO>ReadFastq") 23 | .Input("filename: string") 24 | .Output("sequences: string") 25 | .Output("raw_quality: string") 26 | .SetShapeFn([](::tensorflow::shape_inference::InferenceContext* c) { 27 | c->set_output(0, c->MakeShape({c->UnknownDim()})); 28 | c->set_output(1, c->MakeShape({c->UnknownDim()})); 29 | return OkStatus(); 30 | }); 31 | 32 | } // namespace tensorflow 33 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/color.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
def set_configuration(scheme, key, value, name=None):
    """Apply a configuration option to the file system plugin for `scheme`.

    Args:
      scheme: File system scheme.
      key: The name of the configuration option.
      value: The value of the configuration option.
      name: A name for the operation (optional).

    Returns:
      None.
    """
    set_op = core_ops.io_file_system_set_configuration
    return set_op(scheme, key=key, value=value, name=name)
# Re-export the deprecated Kafka symbols for backwards compatibility.
from tensorflow.python.util.all_util import remove_undocumented

from tensorflow_io.python.ops.kafka_deprecated_dataset_ops import KafkaOutputSequence
from tensorflow_io.python.ops.kafka_deprecated_dataset_ops import KafkaDataset
from tensorflow_io.python.ops.kafka_deprecated_dataset_ops import write_kafka

# The public API of this deprecated module; everything else is removed below.
_allowed_symbols = [
    "KafkaOutputSequence",
    "KafkaDataset",
    "write_kafka",
]

# Strip any names not listed above from the module namespace.
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_io/core/kernels/gsmemcachedfs/gs_memcached_file_system.h" 17 | 18 | namespace tensorflow { 19 | 20 | Status GsMemcachedFileSystem::ParseGcsPath(StringPiece fname, 21 | bool empty_object_ok, string* bucket, 22 | string* object) { 23 | return ParseGcsPathForScheme(fname, "gsmemcached", empty_object_ok, bucket, 24 | object); 25 | } 26 | 27 | } // namespace tensorflow 28 | 29 | REGISTER_FILE_SYSTEM("gsmemcached", 30 | ::tensorflow::RetryingGsMemcachedFileSystem); 31 | -------------------------------------------------------------------------------- /tests/test_gcloud/testbench/error_response.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Google LLC. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | """A helper class to send error responses in the storage client test bench.""" 15 | 16 | import flask 17 | 18 | 19 | class ErrorResponse(Exception): 20 | """Simplify generation of error responses.""" 21 | 22 | status_code = 400 23 | 24 | def __init__(self, message, status_code=None, payload=None): 25 | Exception.__init__(self) 26 | self.message = message 27 | if status_code is not None: 28 | self.status_code = status_code 29 | self.payload = payload 30 | 31 | def as_response(self): 32 | kv = dict(self.payload or ()) 33 | kv["message"] = self.message 34 | response = flask.jsonify(kv) 35 | response.status_code = self.status_code 36 | return response 37 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/experimental/columnar.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io.experimental.columnar""" 16 | 17 | from tensorflow_io.python.experimental.avro_record_dataset_ops import ( # pylint: disable=unused-import 18 | AvroRecordDataset, 19 | ) 20 | 21 | from tensorflow_io.python.experimental.make_avro_record_dataset import ( # pylint: disable=unused-import 22 | make_avro_record_dataset, 23 | ) 24 | 25 | from tensorflow_io.python.experimental.parse_avro_ops import ( # pylint: disable=unused-import 26 | parse_avro, 27 | ) 28 | 29 | from tensorflow_io.python.experimental.varlen_feature_with_rank import ( # pylint: disable=unused-import 30 | VarLenFeatureWithRank, 31 | ) 32 | -------------------------------------------------------------------------------- /tensorflow_io/python/ops/archive_ops.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
def list_archive_entries(filename, filters, **kwargs):
    """List the entries of an archive that match `filters`.

    Args:
      filename: The archive to inspect.
      filters: A single filter, or a list of filters, to match entries against.
      **kwargs: Optional `memory` keyword forwarded to the kernel.

    Returns:
      The matching archive entries.
    """
    memory = kwargs.get("memory", "")
    filter_list = filters if isinstance(filters, list) else [filters]
    return core_ops.io_list_archive_entries(
        filename, filters=filter_list, memory=memory
    )


def read_archive(
    filename, format, entries, **kwargs
):  # pylint: disable=redefined-builtin
    """Read the given `entries` out of an archive.

    Args:
      filename: The archive to read.
      format: The archive format.
      entries: The entries to extract.
      **kwargs: Optional `memory` keyword forwarded to the kernel.

    Returns:
      The contents of the requested entries.
    """
    memory = kwargs.get("memory", "")
    return core_ops.io_read_archive(filename, format, entries, memory=memory)
linear-gradient(to bottom,#fcf8e3 0,#f8efc0 100%); 71 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0); 72 | background-repeat: repeat-x; 73 | border: 1px solid #f5e79e; 74 | border-radius: 4px; 75 | } 76 | 77 | 78 | -------------------------------------------------------------------------------- /R-package/pkgdown/extra.css: -------------------------------------------------------------------------------- 1 | 2 | h4.date, 3 | h4.author { 4 | display: none; 5 | } 6 | 7 | h2.hasAnchor { 8 | font-weight: 350; 9 | } 10 | 11 | .ref-index tbody { 12 | margin-bottom: 60px; 13 | } 14 | 15 | pre:not([class]) { 16 | background-color: white; 17 | } 18 | 19 | .contents a { 20 | text-decoration: none; 21 | } 22 | 23 | blockquote { 24 | font-size: inherit; 25 | } 26 | 27 | .examples .page-header { 28 | border-bottom: none; 29 | margin: 0; 30 | padding-bottom: 0; 31 | } 32 | 33 | .examples .sourceCode { 34 | margin-top: 25px; 35 | } 36 | 37 | #sidebar .nav>li>a { 38 | padding-top: 1px; 39 | padding-bottom: 2px; 40 | } 41 | 42 | #installation .sourceCode { 43 | font-size: 13px; 44 | } 45 | 46 | .r-plot { 47 | margin-bottom: 25px; 48 | } 49 | 50 | .screenshot { 51 | margin-bottom: 20px; 52 | } 53 | 54 | .source-ref { 55 | margin-bottom: 20px; 56 | } 57 | 58 | .source-ref .caption { 59 | display: none; 60 | } 61 | 62 | 63 | 64 | .alert-warning { 65 | color: #8a6d3b; 66 | background-color: #fcf8e3; 67 | padding: 15px; 68 | margin-top: 20px; 69 | margin-bottom: 20px; 70 | background-image: linear-gradient(to bottom,#fcf8e3 0,#f8efc0 100%); 71 | filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0); 72 | background-repeat: repeat-x; 73 | border: 1px solid #f5e79e; 74 | border-radius: 4px; 75 | } 76 | 77 | 78 | -------------------------------------------------------------------------------- /R-package/man/arrow_stream_dataset.Rd: 
-------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arrow_dataset.R 3 | \name{arrow_stream_dataset} 4 | \alias{arrow_stream_dataset} 5 | \title{Creates a \code{ArrowStreamDataset}.} 6 | \usage{ 7 | arrow_stream_dataset(host, columns, output_types, output_shapes = NULL) 8 | } 9 | \arguments{ 10 | \item{host}{A \code{tf.string} tensor or string defining the input stream. 11 | For a socket client, use ":", for stdin use "STDIN".} 12 | 13 | \item{columns}{A list of column indices to be used in the Dataset.} 14 | 15 | \item{output_types}{Tensor dtypes of the output tensors.} 16 | 17 | \item{output_shapes}{TensorShapes of the output tensors or \code{NULL} to infer 18 | partial.} 19 | } 20 | \description{ 21 | An Arrow Dataset for reading record batches from an input stream. Currently 22 | supported input streams are a socket client or stdin. 23 | } 24 | \examples{ 25 | \dontrun{ 26 | dataset <- arrow_stream_dataset( 27 | host, 28 | columns = reticulate::tuple(0L, 1L), 29 | output_types = reticulate::tuple(tf$int32, tf$float32), 30 | output_shapes = reticulate::tuple(list(), list())) \%>\% 31 | dataset_repeat(1) 32 | 33 | sess <- tf$Session() 34 | iterator <- make_iterator_one_shot(dataset) 35 | next_batch <- iterator_get_next(iterator) 36 | 37 | until_out_of_range({ 38 | batch <- sess$run(next_batch) 39 | print(batch) 40 | }) 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /tensorflow_io/core/ops/obj_ops.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow/core/framework/common_shape_fns.h" 17 | #include "tensorflow/core/framework/op.h" 18 | #include "tensorflow/core/framework/shape_inference.h" 19 | 20 | namespace tensorflow { 21 | namespace io { 22 | namespace { 23 | 24 | REGISTER_OP("IO>DecodeObj") 25 | .Input("input: string") 26 | .Output("output: float32") 27 | .SetShapeFn([](shape_inference::InferenceContext* c) { 28 | shape_inference::ShapeHandle unused; 29 | TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 0, &unused)); 30 | c->set_output(0, c->MakeShape({c->UnknownDim(), 3})); 31 | return OkStatus(); 32 | }); 33 | 34 | } // namespace 35 | } // namespace io 36 | } // namespace tensorflow 37 | -------------------------------------------------------------------------------- /tests/test_obj.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not 4 | # use this file except in compliance with the License. You may obtain a copy of 5 | # the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 12 | # License for the specific language governing permissions and limitations under 13 | # the License. 14 | # ============================================================================== 15 | """Test Wavefront OBJ""" 16 | 17 | import os 18 | import numpy as np 19 | import pytest 20 | 21 | import tensorflow as tf 22 | import tensorflow_io as tfio 23 | 24 | 25 | def test_decode_obj(): 26 | """Test case for decode obj""" 27 | filename = os.path.join( 28 | os.path.dirname(os.path.abspath(__file__)), 29 | "test_obj", 30 | "sample.obj", 31 | ) 32 | filename = "file://" + filename 33 | 34 | obj = tfio.experimental.image.decode_obj(tf.io.read_file(filename)) 35 | expected = np.array( 36 | [[-0.5, 0.0, 0.4], [-0.5, 0.0, -0.8], [-0.5, 1.0, -0.8], [-0.5, 1.0, 0.4]], 37 | dtype=np.float32, 38 | ) 39 | assert np.array_equal(obj, expected) 40 | -------------------------------------------------------------------------------- /R-package/DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: tfio 2 | Type: Package 3 | Title: Interface to 'TensorFlow IO' 4 | Version: 0.4.1 5 | Authors@R: c( 6 | person("TensorFlow IO Contributors", role = c("aut", "cph"), 7 | email = "io@tensorflow.org", 8 | comment = "Full list of contributors can be found at "), 9 | person("Yuan", "Tang", role = c("aut", "cre"), 10 | email = "terrytangyuan@gmail.com", 11 | comment = c(ORCID = "0000-0001-5243-233X")), 12 | person(family = "TensorFlow Authors", role = c("cph")), 13 | person("Ant Financial", role = c("cph")), 14 | person("RStudio", role = c("cph")) 15 | ) 16 | Description: Interface to 'TensorFlow IO', Datasets and filesystem extensions maintained by `TensorFlow SIG-IO` . 
17 | License: Apache License 2.0 18 | URL: https://github.com/tensorflow/io 19 | BugReports: https://github.com/tensorflow/io/issues 20 | SystemRequirements: TensorFlow >= 1.13.0 (https://www.tensorflow.org/) and TensorFlow IO >= 0.4.0 (https://github.com/tensorflow/io) 21 | Encoding: UTF-8 22 | LazyData: true 23 | Depends: 24 | R (>= 3.1) 25 | Imports: 26 | reticulate (>= 1.10), 27 | tensorflow (>= 1.9), 28 | tfdatasets (>= 1.9), 29 | forge, 30 | magrittr 31 | Roxygen: list(markdown = TRUE) 32 | RoxygenNote: 7.0.2 33 | Suggests: 34 | testthat, 35 | knitr 36 | VignetteBuilder: knitr 37 | -------------------------------------------------------------------------------- /tensorflow_io/bigquery.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Cloud BigQuery Client for TensorFlow. 16 | 17 | This package allows TensorFlow to interface directly with Cloud BigQuery 18 | for high-speed data loading. 
19 | 20 | @@BigQueryClient 21 | @@BigQueryReadSession 22 | @@BigQueryTestClient 23 | 24 | """ 25 | 26 | 27 | from tensorflow.python.util.all_util import remove_undocumented 28 | from tensorflow_io.python.ops.bigquery_dataset_ops import BigQueryClient 29 | from tensorflow_io.python.ops.bigquery_dataset_ops import BigQueryReadSession 30 | from tensorflow_io.python.ops.bigquery_dataset_ops import BigQueryTestClient 31 | 32 | _allowed_symbols = ["BigQueryClient", "BigQueryReadSession", "BigQueryTestClient"] 33 | 34 | remove_undocumented(__name__, _allowed_symbols) 35 | -------------------------------------------------------------------------------- /R-package/man/arrow_feather_dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arrow_dataset.R 3 | \name{arrow_feather_dataset} 4 | \alias{arrow_feather_dataset} 5 | \title{Creates a \code{ArrowFeatherDataset}.} 6 | \usage{ 7 | arrow_feather_dataset(filenames, columns, output_types, output_shapes = NULL) 8 | } 9 | \arguments{ 10 | \item{filenames}{A \code{tf.string} tensor, list or scalar containing files in 11 | Arrow Feather format.} 12 | 13 | \item{columns}{A list of column indices to be used in the Dataset.} 14 | 15 | \item{output_types}{Tensor dtypes of the output tensors.} 16 | 17 | \item{output_shapes}{TensorShapes of the output tensors or \code{NULL} to infer 18 | partial.} 19 | } 20 | \description{ 21 | An Arrow Dataset for reading record batches from Arrow feather files. Feather 22 | is a light-weight columnar format ideal for simple writing of Pandas 23 | DataFrames. 
24 | } 25 | \examples{ 26 | \dontrun{ 27 | dataset <- arrow_feather_dataset( 28 | list('/path/to/a.feather', '/path/to/b.feather'), 29 | columns = reticulate::tuple(0L, 1L), 30 | output_types = reticulate::tuple(tf$int32, tf$float32), 31 | output_shapes = reticulate::tuple(list(), list())) \%>\% 32 | dataset_repeat(1) 33 | 34 | sess <- tf$Session() 35 | iterator <- make_iterator_one_shot(dataset) 36 | next_batch <- iterator_get_next(iterator) 37 | 38 | until_out_of_range({ 39 | batch <- sess$run(next_batch) 40 | print(batch) 41 | }) 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /tensorflow_io_gcs_filesystem/core/file_system_plugin_gs.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_IO_PLUGIN_GS_CORE_FILE_SYSTEM_PLUGINS_H 17 | #define TENSORFLOW_IO_PLUGIN_GS_CORE_FILE_SYSTEM_PLUGINS_H 18 | 19 | #include 20 | 21 | #include "tensorflow/c/experimental/filesystem/filesystem_interface.h" 22 | 23 | namespace tensorflow { 24 | namespace io { 25 | 26 | static void* plugin_memory_allocate(size_t size) { return calloc(1, size); } 27 | static void plugin_memory_free(void* ptr) { free(ptr); } 28 | 29 | namespace gs { 30 | 31 | void ProvideFilesystemSupportFor(TF_FilesystemPluginOps* ops, const char* uri); 32 | 33 | } // namespace gs 34 | 35 | } // namespace io 36 | } // namespace tensorflow 37 | 38 | #endif // TENSORFLOW_IO_PLUGIN_GS_CORE_FILE_SYSTEM_PLUGINS_H 39 | -------------------------------------------------------------------------------- /tensorflow_io/core/filesystems/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "filesystem_plugins_header", 12 | srcs = [ 13 | "filesystem_plugins.h", 14 | ] + select({ 15 | "@bazel_tools//src/conditions:windows": [ 16 | "@local_config_tf//:stub/libtensorflow_framework.lib", 17 | ], 18 | "//conditions:default": [ 19 | "@local_config_tf//:stub/libtensorflow_framework.so", 20 | ], 21 | }), 22 | copts = tf_io_copts(), 23 | linkstatic = True, 24 | deps = [ 25 | "@local_config_tf//:tf_c_header_lib", 26 | "@local_config_tf//:tf_tsl_header_lib", 27 | ], 28 | alwayslink = 1, 29 | ) 30 | 31 | cc_library( 32 | name = "filesystem_plugins", 33 | srcs = [ 34 | "filesystem_plugins.cc", 35 | ], 36 | copts = tf_io_copts(), 37 | linkstatic = True, 38 | deps = [ 39 | "//tensorflow_io/core/filesystems/az", 40 | "//tensorflow_io/core/filesystems/hdfs", 41 | 
"//tensorflow_io/core/filesystems/http", 42 | "//tensorflow_io/core/filesystems/s3", 43 | ] + select({ 44 | "@bazel_tools//src/conditions:windows": [], 45 | "//conditions:default": [ 46 | "//tensorflow_io/core/filesystems/oss", 47 | ], 48 | }), 49 | alwayslink = 1, 50 | ) 51 | -------------------------------------------------------------------------------- /tensorflow_io_gcs_filesystem/core/gcs_helper.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | #ifndef TENSORFLOW_C_EXPERIMENTAL_FILESYSTEM_PLUGINS_GCS_GCS_HELPER_H_ 16 | #define TENSORFLOW_C_EXPERIMENTAL_FILESYSTEM_PLUGINS_GCS_GCS_HELPER_H_ 17 | 18 | #include 19 | #include 20 | 21 | class TempFile : public std::fstream { 22 | public: 23 | // We should specify openmode each time we call TempFile. 
24 | TempFile(const std::string& temp_file_name, std::ios::openmode mode); 25 | TempFile(TempFile&& rhs); 26 | ~TempFile() override; 27 | const std::string getName() const; 28 | bool truncate(); 29 | 30 | private: 31 | const std::string name_; 32 | }; 33 | 34 | std::string GCSGetTempFileName(const std::string& extension); 35 | 36 | #endif // TENSORFLOW_C_EXPERIMENTAL_FILESYSTEM_PLUGINS_GCS_GCS_HELPER_H_ 37 | -------------------------------------------------------------------------------- /tensorflow_io/python/experimental/file_dataset_ops.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """FileDataset""" 16 | 17 | import tensorflow as tf 18 | from tensorflow_io.python.ops import core_ops 19 | 20 | 21 | @tf.function 22 | def to_file(dataset, filename): 23 | """to_file""" 24 | resource = core_ops.io_file_init(filename) 25 | 26 | dataset = dataset.map(lambda e: (e, tf.constant(False))) 27 | dataset = dataset.concatenate( 28 | tf.data.Dataset.from_tensor_slices([tf.constant([], tf.string)]).map( 29 | lambda e: (e, tf.constant(True)) 30 | ) 31 | ) 32 | dataset = dataset.map( 33 | lambda entry, final: core_ops.io_file_call(entry, final, resource) 34 | ) 35 | dataset = dataset.map(tf.shape) 36 | 37 | return dataset.reduce(0, lambda x, y: x + y) 38 | -------------------------------------------------------------------------------- /tensorflow_io/python/experimental/varlen_feature_with_rank.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """VarLenFeatureWithRank""" 16 | 17 | import tensorflow as tf 18 | 19 | 20 | class VarLenFeatureWithRank: 21 | """ 22 | A class used to represent VarLenFeature with rank. 
23 | This allows rank to be passed by users, and when parsing, 24 | rank will be used to determine the shape of sparse feature. 25 | User should use this class as opposed to VarLenFeature 26 | when defining features of data. 27 | """ 28 | 29 | def __init__(self, dtype: tf.dtypes.DType, rank: int = 1): 30 | self.__dtype = dtype 31 | self.__rank = rank 32 | 33 | @property 34 | def rank(self): 35 | return self.__rank 36 | 37 | @property 38 | def dtype(self): 39 | return self.__dtype 40 | -------------------------------------------------------------------------------- /tensorflow_io/arrow.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Arrow Dataset. 
16 | 17 | @@ArrowDataset 18 | @@ArrowFeatherDataset 19 | @@ArrowStreamDataset 20 | @@list_feather_columns 21 | """ 22 | 23 | 24 | from tensorflow.python.util.all_util import remove_undocumented 25 | 26 | from tensorflow_io.python.ops.arrow_dataset_ops import ArrowDataset 27 | from tensorflow_io.python.ops.arrow_dataset_ops import ArrowFeatherDataset 28 | from tensorflow_io.python.ops.arrow_dataset_ops import ArrowStreamDataset 29 | from tensorflow_io.python.ops.arrow_dataset_ops import list_feather_columns 30 | 31 | 32 | _allowed_symbols = [ 33 | "ArrowDataset", 34 | "ArrowFeatherDataset", 35 | "ArrowStreamDataset", 36 | "list_feather_columns", 37 | ] 38 | 39 | remove_undocumented(__name__, allowed_exception_list=_allowed_symbols) 40 | -------------------------------------------------------------------------------- /tensorflow_io/python/api/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """tensorflow_io""" 16 | import os 17 | 18 | # tensorflow_io.core.python.ops is implicitly imported (along with file system) 19 | from tensorflow_io.python.ops.io_dataset import IODataset 20 | from tensorflow_io.python.ops.io_tensor import IOTensor 21 | 22 | from tensorflow_io.python.api import genome 23 | from tensorflow_io.python.api import image 24 | from tensorflow_io.python.api import audio 25 | from tensorflow_io.python.api import version 26 | from tensorflow_io.python.api import experimental 27 | from tensorflow_io.python.api import bigtable 28 | 29 | if os.environ.get("GENERATING_TF_DOCS", ""): 30 | # Mark these as public api for /tools/docs/build_docs.py 31 | from tensorflow_io import arrow 32 | from tensorflow_io import bigquery 33 | 34 | del os 35 | -------------------------------------------------------------------------------- /third_party/uuid.BUILD: -------------------------------------------------------------------------------- 1 | # Description: 2 | # uuid 3 | 4 | licenses(["notice"]) 5 | 6 | cc_library( 7 | name = "uuid", 8 | srcs = [ 9 | "include/all-io.h", 10 | "include/c.h", 11 | "include/md5.h", 12 | "include/nls.h", 13 | "include/randutils.h", 14 | "include/sha1.h", 15 | "include/strutils.h", 16 | "lib/md5.c", 17 | "lib/randutils.c", 18 | "lib/sha1.c", 19 | "libuuid/src/clear.c", 20 | "libuuid/src/compare.c", 21 | "libuuid/src/copy.c", 22 | "libuuid/src/gen_uuid.c", 23 | "libuuid/src/isnull.c", 24 | "libuuid/src/pack.c", 25 | "libuuid/src/parse.c", 26 | "libuuid/src/predefined.c", 27 | "libuuid/src/unpack.c", 28 | "libuuid/src/unparse.c", 29 | "libuuid/src/uuid.h", 30 | "libuuid/src/uuidP.h", 31 | "libuuid/src/uuid_time.c", 32 | "libuuid/src/uuidd.h", 33 | ], 34 | hdrs = [ 35 | "libuuid/src/uuid.h", 36 | ], 37 | copts = ["-std=c99"], 38 | defines = select({ 39 | "//conditions:default": [ 40 | "_XOPEN_SOURCE=700", 41 | "HAVE_NANOSLEEP", 42 | 
"HAVE_SYS_FILE_H", 43 | "HAVE_MEMCPY", 44 | "HAVE_STRNLEN", 45 | "HAVE_STRNDUP", 46 | "HAVE_STRNCHR", 47 | ], 48 | }), 49 | include_prefix = "uuid", 50 | includes = ["include"], 51 | strip_include_prefix = "libuuid/src", 52 | visibility = ["//visibility:public"], 53 | ) 54 | -------------------------------------------------------------------------------- /tests/test_hdfs/hdfs_test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | set -e 18 | set -o pipefail 19 | 20 | HADOOP_RELEASE_TAG="3.2.1" 21 | curl -OL https://github.com/big-data-europe/docker-hadoop/archive/refs/tags/$HADOOP_RELEASE_TAG.tar.gz 22 | tar -xzf $HADOOP_RELEASE_TAG.tar.gz -C /tmp/ 23 | cd /tmp/docker-hadoop-$HADOOP_RELEASE_TAG 24 | # Add following properties 25 | # to prevent following error when closing the hdfs client: 26 | # 27 | # java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes 28 | # being available to try. 
29 | 30 | echo " 31 | HDFS_CONF_dfs_client_block_write_replace___datanode___on___failure_enable=true 32 | HDFS_CONF_dfs_client_block_write_replace___datanode___on___failure_policy=NEVER" >> hadoop.env 33 | 34 | docker-compose up -d 35 | echo "Hadoop up" 36 | exit 0 37 | -------------------------------------------------------------------------------- /tests/test_text/stdin_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not 4 | # use this file except in compliance with the License. You may obtain a copy of 5 | # the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations under 13 | # the License. 
14 | # ============================================================================== 15 | """Tests for TextDataset with stdin.""" 16 | 17 | import tensorflow as tf 18 | 19 | if not (hasattr(tf, "version") and tf.version.VERSION.startswith("2.")): 20 | tf.compat.v1.enable_eager_execution() 21 | import tensorflow_io.text as text_io # pylint: disable=wrong-import-position 22 | 23 | # Note: run the following: 24 | # tshark -T fields -e frame.number -e ip.dst -e ip.proto -r attack-trace.pcap | python stdin_test.py 25 | 26 | 27 | def f(v): 28 | frame_number, ip_dst, ip_proto = tf.io.decode_csv( 29 | v, [[0], [""], [0]], field_delim="\t" 30 | ) 31 | return frame_number, ip_dst, ip_proto 32 | 33 | 34 | text_dataset = text_io.TextDataset("file://-").map(f) 35 | 36 | for frame_number_value, ip_dst_value, ip_proto_value in text_dataset: 37 | print(ip_dst_value.numpy()) 38 | -------------------------------------------------------------------------------- /docs/tutorials/_toc.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # 15 | # ============================================================================== 16 | toc: 17 | - title: "BigQuery" 18 | path: /io/tutorials/bigquery 19 | - title: "Decode DICOM files" 20 | path: /io/tutorials/dicom 21 | - title: "Azure blob storage" 22 | path: /io/tutorials/azure 23 | - title: "Prometheus metrics" 24 | path: /io/tutorials/prometheus 25 | - title: "Genomics IO" 26 | path: /io/tutorials/genome 27 | - title: "PostgreSQL database" 28 | path: /io/tutorials/postgresql 29 | - title: "Color Space Conversions" 30 | path: /io/tutorials/colorspace 31 | - title: "Audio" 32 | path: /io/tutorials/audio 33 | - title: "Kafka" 34 | path: /io/tutorials/kafka 35 | - title: "Elasticsearch" 36 | path: /io/tutorials/elasticsearch 37 | - title: "Avro" 38 | path: /io/tutorials/avro 39 | - title: "ORC" 40 | path: /io/tutorials/orc 41 | - title: "MongoDB" 42 | path: /io/tutorials/mongodb 43 | -------------------------------------------------------------------------------- /tests/test_mongodb/mongodb_test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # ============================================================================== 16 | 17 | set -e 18 | set -o pipefail 19 | 20 | action=$1 21 | 22 | if [ "$action" == "start" ]; then 23 | 24 | echo "" 25 | echo "Starting the tfio mongodb docker container..." 26 | echo "" 27 | MONGO_IMAGE="mongo" 28 | 29 | docker run --rm -d -p 27017-27019:27017-27019 --name tfio-mongodb \ 30 | -e MONGO_INITDB_ROOT_USERNAME=mongoadmin \ 31 | -e MONGO_INITDB_ROOT_PASSWORD=default_password \ 32 | -e MONGO_INITDB_DATABASE=tfiodb \ 33 | ${MONGO_IMAGE} 34 | 35 | echo "" 36 | echo "Waiting for mongodb to be up and running..." 37 | echo "" 38 | sleep 60 39 | 40 | elif [ "$action" == "stop" ]; then 41 | echo "" 42 | echo "Removing the tfio mongodb container..." 43 | echo "" 44 | docker rm -f tfio-mongodb 45 | 46 | else 47 | echo "" 48 | echo "Invalid value: Use 'start' to run the container and 'stop' to remove it." 49 | echo "" 50 | fi 51 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Tensorflow I/O project welcomes all kinds of contributions, be it code changes, bug-fixes or documentation changes. This guide should help you in taking care of some basic setups & code conventions. 4 | 5 | ## Contributor License Agreement 6 | 7 | Contributions to this project must be accompanied by a Contributor License 8 | Agreement. You (or your employer) retain the copyright to your contribution; 9 | this simply gives us permission to use and redistribute your contributions as 10 | part of the project. Head over to https://cla.developers.google.com/ to see 11 | your current agreements on file or to sign a new one. 12 | 13 | You generally only need to submit a CLA once, so if you've already submitted one 14 | (even if it was for a different project), you probably don't need to do it 15 | again. 
16 | 17 | ## Coding Style 18 | 19 | Tensorflow project wide code style guidelines can be followed at [TensorFlow Style Guide - Conventions](https://www.tensorflow.org/community/contribute/code_style) and Tensorflow I/O project specific code style guidelines can be followed at [Style Guide](STYLE_GUIDE.md). 20 | 21 | ## Code Reviews 22 | 23 | All submissions, including submissions by project members, require review. We 24 | use Github pull requests for this purpose. 25 | Tensorflow I/O project's currently open pull requests, 26 | can be viewed [here](https://github.com/tensorflow/io/pulls). 27 | 28 | ## Features, Enhancements & Issues 29 | 30 | All the open work items in Tensorflow I/O project like features, enhancements and open issues 31 | can be viewed [here](https://github.com/tensorflow/io/issues). 32 | -------------------------------------------------------------------------------- /.kokorun/io_gpu.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # ============================================================================== 17 | # Make sure we're in the project root path. 18 | SCRIPT_DIR=$( cd ${0%/*} && pwd -P ) 19 | ROOT_DIR=$( cd "$SCRIPT_DIR/.." && pwd -P ) 20 | if [[ ! 
-d "tensorflow_io" ]]; then 21 | echo "ERROR: PWD: $PWD is not project root" 22 | exit 1 23 | fi 24 | 25 | set -x 26 | 27 | PLATFORM="$(uname -s | tr 'A-Z' 'a-z')" 28 | 29 | if [[ ${PLATFORM} == "darwin" ]]; then 30 | N_JOBS=$(sysctl -n hw.ncpu) 31 | else 32 | N_JOBS=$(grep -c ^processor /proc/cpuinfo) 33 | fi 34 | 35 | echo "" 36 | echo "Bazel will use ${N_JOBS} concurrent job(s)." 37 | echo "" 38 | 39 | export CC_OPT_FLAGS='-mavx' 40 | export TF_NEED_CUDA=0 # TODO: Verify this is used in GPU custom-op 41 | 42 | export PYTHON_BIN_PATH=`which python` 43 | 44 | python --version 45 | python -m pip --version 46 | docker --version 47 | 48 | bash -x -e .github/workflows/build.gpu.sh 49 | 50 | exit $? 51 | -------------------------------------------------------------------------------- /tensorflow_io/core/kernels/avro/utils/parse_avro_attrs.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_io/core/kernels/avro/utils/parse_avro_attrs.h" 17 | 18 | #include "tensorflow/core/framework/types.h" 19 | #include "tensorflow/core/lib/core/errors.h" 20 | #include "tensorflow/core/platform/types.h" 21 | 22 | namespace tensorflow { 23 | namespace data { 24 | 25 | // Boilerplate adapted from tensorflow/core/util/example_proto_helper.cc and 26 | // therein "ParseSingleExampleAttrs". Returns OkStatus() iff dtype is one of the primitive types supported for Avro parsing; InvalidArgument otherwise. 27 | Status CheckValidType(const DataType& dtype) { 28 | switch (dtype) { 29 | case DT_BOOL: 30 | case DT_INT32: 31 | case DT_INT64: 32 | case DT_FLOAT: 33 | case DT_DOUBLE: 34 | case DT_STRING: 35 | return OkStatus(); 36 | default: 37 | return errors::InvalidArgument("Received input dtype: ", 38 | DataTypeString(dtype)); 39 | } 40 | } 41 | 42 | } // namespace data 43 | } // namespace tensorflow 44 | -------------------------------------------------------------------------------- /tensorflow_io_gcs_filesystem/core/BUILD: -------------------------------------------------------------------------------- 1 | licenses(["notice"]) # Apache 2.0 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | load( 6 | "//:tools/build/tensorflow_io.bzl", 7 | "tf_io_copts", 8 | ) 9 | 10 | cc_library( 11 | name = "gs", 12 | srcs = [ 13 | "cleanup.h", 14 | "expiring_lru_cache.h", 15 | "file_system_plugin_gs.cc", 16 | "file_system_plugin_gs.h", 17 | "gcs_filesystem.cc", 18 | "gcs_helper.cc", 19 | "gcs_helper.h", 20 | "ram_file_block_cache.cc", 21 | "ram_file_block_cache.h", 22 | ] + select({ 23 | "@bazel_tools//src/conditions:windows": [ 24 | "@local_config_tf//:stub/libtensorflow_framework.lib", 25 | ], 26 | "//conditions:default": [ 27 | "@local_config_tf//:stub/libtensorflow_framework.so", 28 | ], 29 | }), 30 | copts = tf_io_copts(), 31 | linkstatic = True, 32 | deps = [ 33 | "@com_github_googleapis_google_cloud_cpp//:storage_client", 34 |
"@com_google_absl//absl/base:core_headers", 35 | "@com_google_absl//absl/strings", 36 | "@com_google_absl//absl/synchronization", 37 | "@com_google_absl//absl/types:variant", 38 | "@local_config_tf//:tf_c_header_lib", 39 | "@local_config_tf//:tf_tsl_header_lib", 40 | "@local_tsl//tsl/c:tsl_status", 41 | ], 42 | alwayslink = 1, 43 | ) 44 | 45 | cc_binary( 46 | name = "python/ops/libtensorflow_io_gcs_filesystem.so", 47 | copts = tf_io_copts(), 48 | linkshared = 1, 49 | deps = [ 50 | "//tensorflow_io_gcs_filesystem/core:gs", 51 | ], 52 | ) 53 | -------------------------------------------------------------------------------- /docs/overview.md: -------------------------------------------------------------------------------- 1 |
2 |

3 |
4 | 5 | ----------------- 6 | 7 | # TensorFlow I/O 8 | 9 | TensorFlow I/O is an extension package to TensorFlow, which encompasses I/O support for 10 | a collection of file systems and file formats that are not available in TensorFlow's built-in support. 11 | Integrations with many systems and cloud vendors include (but are not limited to): 12 | 13 | - Prometheus 14 | - Apache Kafka 15 | - Apache Ignite 16 | - Google Cloud BigQuery 17 | - Google Cloud PubSub 18 | - AWS Kinesis 19 | - Microsoft Azure Storage 20 | - Alibaba Cloud OSS etc. 21 | 22 | ## Community 23 | 24 | * SIG IO [Google Group](https://groups.google.com/a/tensorflow.org/forum/#!forum/io) and mailing list: [io@tensorflow.org](mailto:io@tensorflow.org) 25 | * SIG IO [Monthly Meeting Notes](https://docs.google.com/document/d/1CB51yJxns5WA4Ylv89D-a5qReiGTC0GYum6DU-9nKGo/edit) 26 | * Gitter room: [tensorflow/sig-io](https://gitter.im/tensorflow/sig-io) 27 | 28 | ## More Information 29 | 30 | * [TensorFlow with Apache Arrow Datasets](https://medium.com/tensorflow/tensorflow-with-apache-arrow-datasets-cdbcfe80a59f) - [Bryan Cutler](https://github.com/BryanCutler) 31 | * [How to build a custom Dataset for Tensorflow](https://towardsdatascience.com/how-to-build-a-custom-dataset-for-tensorflow-1fe3967544d8) - [Ivelin Ivanov](https://github.com/ivelin) 32 | * [TensorFlow on Apache Ignite](https://medium.com/tensorflow/tensorflow-on-apache-ignite-99f1fc60efeb) - [Anton Dmitriev](https://github.com/dmitrievanthony) 33 | 34 | ## License 35 | 36 | [Apache License 2.0](https://github.com/tensorflow/io/blob/master/LICENSE) 37 | -------------------------------------------------------------------------------- /tests/test_dicom/dicom_samples.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | # change directory to test_dicom folder 18 | cd "${0%/*}" 19 | 20 | set -e 21 | set -o pipefail 22 | 23 | 24 | if [ "$#" -ne 1 ]; then 25 | echo "Usage: $0 download | extract | clean_{all,dcm}" >&2 26 | exit 1 27 | fi 28 | 29 | if [ "$1" == "download" ]; then 30 | input="dicom_sample_source.txt" 31 | 32 | while IFS=' ' read -r fname url 33 | do 34 | echo "Downloading $fname" 35 | curl -sL -o $fname $url 36 | done < "$input" 37 | elif [ "$1" == "extract" ]; then 38 | input="dicom_sample_source.txt" 39 | 40 | while IFS=' ' read -r fname url 41 | do 42 | echo "Extracting $fname" 43 | gunzip -c $fname > "${fname%.*}.dcm" 44 | done < "$input" 45 | elif [ "$1" == "clean_all" ]; then 46 | rm -f *.dcm 47 | rm -f *.gz 48 | elif [ "$1" == "clean_dcm" ]; then 49 | rm -f *.dcm 50 | else 51 | echo "Usage: $0 download | extract | clean_{all,dcm}" >&2 52 | exit 1 53 | fi 54 | -------------------------------------------------------------------------------- /tools/docker/devel.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tensorflow/tensorflow:custom-op-ubuntu16 2 | 3 | RUN rm -f /etc/apt/sources.list.d/jonathonf-ubuntu-python-3_7-xenial.list && apt-get update && \ 4 | apt-get install -y \ 5 | git \ 6 | gcc \ 7 | g++ \ 8 | gdb \ 9 | make \ 10 | 
patch \ 11 | curl \ 12 | nano \ 13 | unzip \ 14 | ffmpeg \ 15 | dnsutils 16 | 17 | RUN curl -sSOL https://github.com/bazelbuild/bazelisk/releases/download/v1.11.0/bazelisk-linux-amd64 && \ 18 | mv bazelisk-linux-amd64 /usr/local/bin/bazel && \ 19 | chmod +x /usr/local/bin/bazel 20 | 21 | ARG CONDA_OS=Linux 22 | 23 | # Miniconda - Python 3.7, 64-bit, x86, latest 24 | RUN curl -sL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o mconda-install.sh && \ 25 | bash -x mconda-install.sh -b -p miniconda && \ 26 | rm mconda-install.sh 27 | 28 | ENV PATH="/miniconda/bin:$PATH" 29 | 30 | ARG CONDA_ADD_PACKAGES="" 31 | 32 | RUN conda create -y -q -n tfio-dev python=3.7 ${CONDA_ADD_PACKAGES} 33 | 34 | ARG ARROW_VERSION=0.16.0 35 | 36 | RUN echo ". /miniconda/etc/profile.d/conda.sh" >> ~/.bashrc && \ 37 | echo "source activate tfio-dev" >> ~/.bashrc 38 | 39 | ARG PIP_ADD_PACKAGES="" 40 | 41 | RUN /bin/bash -c "source activate tfio-dev && python -m pip install \ 42 | avro-python3 \ 43 | python-snappy \ 44 | parameterized \ 45 | pytest \ 46 | pytest-benchmark \ 47 | pylint \ 48 | boto3 \ 49 | google-cloud-pubsub==0.39.1 \ 50 | google-cloud-bigquery-storage==1.1.0 \ 51 | pyarrow==${ARROW_VERSION} \ 52 | pandas \ 53 | scipy \ 54 | fastavro \ 55 | gast==0.2.2 \ 56 | ${PIP_ADD_PACKAGES} \ 57 | " 58 | 59 | ENV TFIO_DATAPATH=bazel-bin 60 | -------------------------------------------------------------------------------- /third_party/oss_c_sdk.BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # Apache 2.0 4 | 5 | cc_library( 6 | name = "aliyun_oss_c_sdk", 7 | srcs = [ 8 | "oss_c_sdk/aos_buf.c", 9 | "oss_c_sdk/aos_buf.h", 10 | "oss_c_sdk/aos_crc64.c", 11 | "oss_c_sdk/aos_crc64.h", 12 | "oss_c_sdk/aos_fstack.c", 13 | "oss_c_sdk/aos_fstack.h", 14 | "oss_c_sdk/aos_http_io.c", 15 | "oss_c_sdk/aos_http_io.h", 16 | "oss_c_sdk/aos_list.h", 17 | 
"oss_c_sdk/aos_log.c", 18 | "oss_c_sdk/aos_status.c", 19 | "oss_c_sdk/aos_string.c", 20 | "oss_c_sdk/aos_transport.c", 21 | "oss_c_sdk/aos_transport.h", 22 | "oss_c_sdk/aos_util.c", 23 | "oss_c_sdk/oss_auth.c", 24 | "oss_c_sdk/oss_bucket.c", 25 | "oss_c_sdk/oss_define.c", 26 | "oss_c_sdk/oss_define.h", 27 | "oss_c_sdk/oss_live.c", 28 | "oss_c_sdk/oss_multipart.c", 29 | "oss_c_sdk/oss_object.c", 30 | "oss_c_sdk/oss_resumable.c", 31 | "oss_c_sdk/oss_resumable.h", 32 | "oss_c_sdk/oss_util.c", 33 | "oss_c_sdk/oss_xml.c", 34 | "oss_c_sdk/oss_xml.h", 35 | ], 36 | hdrs = [ 37 | "oss_c_sdk/aos_define.h", 38 | "oss_c_sdk/aos_log.h", 39 | "oss_c_sdk/aos_status.h", 40 | "oss_c_sdk/aos_string.h", 41 | "oss_c_sdk/aos_util.h", 42 | "oss_c_sdk/oss_api.h", 43 | "oss_c_sdk/oss_auth.h", 44 | "oss_c_sdk/oss_util.h", 45 | ], 46 | includes = [ 47 | "oss_c_sdk", 48 | ], 49 | deps = [ 50 | "@curl", 51 | "@libapr1", 52 | "@libaprutil1", 53 | "@mxml", 54 | ], 55 | ) 56 | -------------------------------------------------------------------------------- /tensorflow_io/python/experimental/io_tensor.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """Experimental `IOTensor` with OpenEXR factory support.""" 16 | 17 | import tensorflow as tf 18 | from tensorflow_io.python.ops import io_tensor 19 | from tensorflow_io.python.experimental import openexr_io_tensor_ops 20 | 21 | 22 | class IOTensor(io_tensor.IOTensor): 23 | """An `IOTensor` subclass exposing experimental factory methods (OpenEXR).""" 24 | 25 | # ============================================================================= 26 | # Factory Methods 27 | # ============================================================================= 28 | 29 | @classmethod 30 | def from_exr(cls, filename, **kwargs): 31 | """Creates an `IOTensor` from an OpenEXR file. 32 | 33 | Args: 34 | filename: A string, the filename of an OpenEXR file. 35 | name: A name prefix for the IOTensor (optional). 36 | 37 | Returns: 38 | An `IOTensor` backed by the OpenEXR file contents. 39 | 40 | """ 41 | with tf.name_scope(kwargs.get("name", "IOFromOpenEXR")): 42 | return openexr_io_tensor_ops.EXRIOTensor(filename, internal=True) 43 | --------------------------------------------------------------------------------