4 changes: 2 additions & 2 deletions .github/workflows/check-java-bridge-licensing.yml
@@ -40,14 +40,14 @@ jobs:
- name: Build
run: |
set -o pipefail
-cd pypaimon/py4j/paimon-python-java-bridge
+cd paimon-python-java-bridge
mvn clean deploy ${{ env.MVN_COMMON_OPTIONS }} -DskipTests \
-DaltDeploymentRepository=validation_repository::default::file:${{ env.MVN_VALIDATION_DIR }} \
| tee ${{ env.MVN_BUILD_OUTPUT_FILE }}

- name: Check licensing
run: |
-cd pypaimon/py4j/paimon-python-java-bridge
+cd paimon-python-java-bridge
mvn ${{ env.MVN_COMMON_OPTIONS }} exec:java@check-licensing -N \
-Dexec.args="${{ env.MVN_BUILD_OUTPUT_FILE }} $(pwd) ${{ env.MVN_VALIDATION_DIR }}" \
-Dlog4j.configurationFile=file://$(pwd)/tools/ci/log4j.properties
52 changes: 52 additions & 0 deletions .github/workflows/create-source-release.yml
@@ -0,0 +1,52 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Create Source Release

on:
workflow_dispatch:

jobs:
build:
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v2

- name: Setup GPG
env:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
run: |
mkdir -p ~/.gnupg
chmod 700 ~/.gnupg
echo $GPG_PRIVATE_KEY | base64 --decode | gpg --batch --import --yes
echo "use-agent" >> ~/.gnupg/gpg.conf
echo "pinentry-program /usr/bin/pinentry" >> ~/.gnupg/gpg-agent.conf
echo "allow-loopback-pinentry" >> ~/.gnupg/gpg-agent.conf

- name: Create source release
env:
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
run: |
mkdir -p output
chmod +x tools/releasing/create_source_release.sh
OUTPUT_DIR=output GPG_PASSPHRASE=$GPG_PASSPHRASE tools/releasing/create_source_release.sh

- name: Upload source release
uses: actions/upload-artifact@v4
with:
name: source-release
path: output/*
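The workflow above only builds and uploads the signed source artifacts. As a hedged sketch of how a downloaded artifact could be sanity-checked locally, the helper below assumes the script emits a tarball together with a detached `.asc` signature and a `.sha512` checksum file (the usual ASF convention); neither the file names nor those companion files are defined by this PR.

```python
# Hypothetical verification helper; artifact naming and companion files are assumptions.
import hashlib
import subprocess
from pathlib import Path


def verify_source_release(tarball: Path) -> None:
    # Check the detached GPG signature (the release key must already be imported).
    subprocess.run(["gpg", "--verify", f"{tarball}.asc", str(tarball)], check=True)

    # Recompute the SHA-512 digest and compare it with the recorded one.
    recorded = Path(f"{tarball}.sha512").read_text().split()[0]
    actual = hashlib.sha512(tarball.read_bytes()).hexdigest()
    if recorded != actual:
        raise ValueError(f"checksum mismatch for {tarball.name}")
```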
8 changes: 1 addition & 7 deletions .github/workflows/paimon-python-checks.yml
@@ -43,14 +43,8 @@ jobs:
with:
java-version: ${{ env.JDK_VERSION }}
distribution: 'adopt'
-- name: Set up hadoop dependency
-run: |
-mkdir -p ${{ github.workspace }}/temp
-curl -L -o ${{ github.workspace }}/temp/bundled-hadoop.jar \
-https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-10.0/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar

- name: Run lint-python.sh
-env:
-_PYPAIMON_HADOOP_CLASSPATH: ${{ github.workspace }}/temp/bundled-hadoop.jar
run: |
chmod +x dev/lint-python.sh
./dev/lint-python.sh
23 changes: 23 additions & 0 deletions MANIFEST.in
@@ -0,0 +1,23 @@
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

global-exclude *.py[cod] __pycache__ .DS_Store
recursive-include deps/jars *.jar
include README.md
include LICENSE
include NOTICE
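The `recursive-include deps/jars *.jar` line above, together with the `pypaimon.jars` package referenced later in `gateway_server.py`, implies that the build maps the bundled bridge jar into that package. The PR does not show `setup.py`, so the following is only a minimal sketch of one way such a mapping could look; everything other than the `pypaimon.jars` and `deps/jars` names is an assumption.

```python
# Illustrative setup.py sketch; not the configuration actually used by this PR.
import glob
import os
from setuptools import find_packages, setup

# The bridge jar is expected to be staged under deps/jars before packaging
# (the lint script below drops a dummy.jar there so packaging works in tests).
if not glob.glob(os.path.join("deps", "jars", "*.jar")):
    raise RuntimeError("deps/jars is empty; build paimon-python-java-bridge first")

setup(
    name="pypaimon",
    packages=find_packages() + ["pypaimon.jars"],
    package_dir={"pypaimon.jars": "deps/jars"},
    package_data={"pypaimon.jars": ["*.jar"]},
)
```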
10 changes: 0 additions & 10 deletions README.md
@@ -31,16 +31,6 @@ We provide script to check codes.
./dev/lint-python.sh -h # run this to see more usages
```

-## Build
-
-We provide script to build wheel.
-
-```shell
-./dev/build-wheels.sh
-```
-
-The target wheel is under `dist/`

# Usage

See Apache Paimon Python API [Doc](https://paimon.apache.org/docs/master/program-api/python-api/).
56 changes: 0 additions & 56 deletions dev/build-wheels.sh

This file was deleted.

8 changes: 6 additions & 2 deletions dev/lint-python.sh
@@ -577,8 +577,9 @@ function tox_check() {
# Ensure the permission of the scripts set correctly
chmod +x $PAIMON_PYTHON_DIR/dev/*

-# tox runs codes in virtual env, set var to avoid error
-export _PYPAIMON_TOX_TEST="true"
+# dummy jar needed by setup.py
+mkdir -p $PAIMON_PYTHON_DIR/deps/jars
+touch $PAIMON_PYTHON_DIR/deps/jars/dummy.jar

if [[ -n "$GITHUB_ACTION" ]]; then
# Run tests in all versions triggered by a Git push (tests aren't so many currently)
@@ -596,6 +597,9 @@ function tox_check() {
$TOX_PATH -vv -c $PAIMON_PYTHON_DIR/tox.ini -e ${ENV_LIST[$index]} --recreate 2>&1 | tee -a $LOG_FILE
fi

+# delete dummy jar
+rm -rf $PAIMON_PYTHON_DIR/deps

TOX_RESULT=$((grep -c "congratulations :)" "$LOG_FILE") 2>&1)
if [ $TOX_RESULT -eq '0' ]; then
print_function "STAGE" "tox checks... [FAILED]"
2 binary files changed (contents not shown).
18 changes: 11 additions & 7 deletions pypaimon/py4j/gateway_server.py
@@ -16,7 +16,7 @@
# limitations under the License.
################################################################################

-import importlib
+import importlib.resources
import os
import platform
import signal
@@ -74,17 +74,21 @@ def preexec_func():
stdin=PIPE, stderr=PIPE, preexec_fn=preexec_fn, env=env)


-_JAVA_IMPL_MODULE = 'pypaimon.py4j'
-_JAVA_DEPS = 'java_dependencies'
-_JAVA_BRIDGE = 'paimon-python-java-bridge'
+_JAVA_DEPS_PACKAGE = 'pypaimon.jars'


def _get_classpath(env):
classpath = []

-    module = importlib.import_module(_JAVA_IMPL_MODULE)
-    builtin_java_bridge = os.path.join(*module.__path__, _JAVA_DEPS, _JAVA_BRIDGE + '.jar')
-    classpath.append(builtin_java_bridge)
+    # note that jars are not packaged in test
+    test_mode = os.environ.get(constants.PYPAIMON4J_TEST_MODE)
+    if not test_mode or test_mode.lower() != "true":
+        jars = importlib.resources.files(_JAVA_DEPS_PACKAGE)
+        one_jar = next(iter(jars.iterdir()), None)
+        if not one_jar:
+            raise ValueError("Haven't found necessary python-java-bridge jar, this is unexpected.")
+        builtin_java_classpath = os.path.join(os.path.dirname(str(one_jar)), '*')
+        classpath.append(builtin_java_classpath)

# user defined
if constants.PYPAIMON_JAVA_CLASSPATH in env:
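For context, the new lookup above relies on `importlib.resources.files()` (Python 3.9+), which resolves a package name to a traversable directory whether it is installed from a wheel or run from the source tree. Below is a minimal standalone sketch of the same pattern, assuming the `pypaimon.jars` package name used in this PR; it is illustrative, not the project's API.

```python
# Minimal sketch of the jar-discovery pattern; mirrors _get_classpath above.
import importlib.resources
import os


def bridge_classpath(package: str = "pypaimon.jars") -> str:
    """Return a '<jar dir>/*' wildcard classpath entry for the bundled jar(s)."""
    files = importlib.resources.files(package)
    one_jar = next((f for f in files.iterdir() if f.name.endswith(".jar")), None)
    if one_jar is None:
        raise ValueError(f"no bridge jar found in package {package!r}")
    # The JVM accepts a directory wildcard, so one entry covers every bundled jar.
    return os.path.join(os.path.dirname(str(one_jar)), "*")
```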
32 changes: 32 additions & 0 deletions pypaimon/py4j/tests/__init__.py
@@ -15,3 +15,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

import os
import shutil
import tempfile
import unittest

from pypaimon.py4j import constants, Catalog


class PypaimonTestBase(unittest.TestCase):
"""
Base class for unit tests.
"""

@classmethod
def setUpClass(cls):
os.environ[constants.PYPAIMON4J_TEST_MODE] = 'true'

this_dir = os.path.abspath(os.path.dirname(__file__))
project_dir = os.path.dirname(os.path.dirname(os.path.dirname(this_dir)))
deps = os.path.join(project_dir, "dev/test_deps/*")
os.environ[constants.PYPAIMON_HADOOP_CLASSPATH] = deps

cls.tempdir = tempfile.mkdtemp()
cls.warehouse = os.path.join(cls.tempdir, 'warehouse')
cls.catalog = Catalog.create({'warehouse': cls.warehouse})
cls.catalog.create_database('default', False)

@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.tempdir, ignore_errors=True)
del os.environ[constants.PYPAIMON4J_TEST_MODE]
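A test built on this base class only needs the shared `cls.catalog` and `cls.warehouse`; the test mode flag, Hadoop classpath, and temporary warehouse are inherited from `setUpClass`. The sketch below is hypothetical usage, not part of this PR; the database name is illustrative, and the second argument of `create_database` is taken to be ignore-if-exists, matching the call in the base class.

```python
# Hypothetical usage of PypaimonTestBase; not included in this PR.
from pypaimon.py4j.tests import PypaimonTestBase


class ExampleCatalogTest(PypaimonTestBase):

    def test_create_database(self):
        # The shared catalog is created once in PypaimonTestBase.setUpClass.
        self.catalog.create_database('example_db', False)
        # Creating it again with ignore_if_exists=True should be a no-op.
        self.catalog.create_database('example_db', True)
```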
25 changes: 3 additions & 22 deletions pypaimon/py4j/tests/test_data_types.py
@@ -16,43 +16,24 @@
# limitations under the License.
################################################################################

import os
import random
import shutil
import string
import tempfile
import pyarrow as pa
import unittest

from pypaimon import Schema
-from pypaimon.py4j import Catalog
-from pypaimon.py4j.tests import utils
+from pypaimon.py4j.tests import PypaimonTestBase
from pypaimon.py4j.util import java_utils
-from setup_utils import java_setuputils


-class DataTypesTest(unittest.TestCase):
+class DataTypesTest(PypaimonTestBase):

@classmethod
def setUpClass(cls):
-        java_setuputils.setup_java_bridge()
-        cls.hadoop_path = tempfile.mkdtemp()
-        utils.setup_hadoop_bundle_jar(cls.hadoop_path)
-        cls.warehouse = tempfile.mkdtemp()
+        super().setUpClass()
cls.simple_pa_schema = pa.schema([
('f0', pa.int32()),
('f1', pa.string())
])
-        cls.catalog = Catalog.create({'warehouse': cls.warehouse})
-        cls.catalog.create_database('default', False)

-    @classmethod
-    def tearDownClass(cls):
-        java_setuputils.clean()
-        if os.path.exists(cls.hadoop_path):
-            shutil.rmtree(cls.hadoop_path)
-        if os.path.exists(cls.warehouse):
-            shutil.rmtree(cls.warehouse)

def test_int(self):
pa_schema = pa.schema([