Skip to content

Commit

Permalink
release v0.8.0-alpha (#1339)
Browse files Browse the repository at this point in the history
  • Loading branch information
cgardens committed Dec 16, 2020
1 parent f5f58e2 commit 01cdeaf
Show file tree
Hide file tree
Showing 17 changed files with 41 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .env
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
VERSION=0.7.2-alpha
VERSION=0.8.0-alpha
DATABASE_USER=docker
DATABASE_PASSWORD=docker
DATABASE_DB=airbyte
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
"destinationDefinitionId": "22f6c74f-5699-40ff-833c-4a879ea40133",
"name": "BigQuery",
"dockerRepository": "airbyte/destination-bigquery",
"dockerImageTag": "0.1.10",
"dockerImageTag": "0.1.11",
"documentationUrl": "https://hub.docker.com/r/airbyte/integration-singer-bigquery-destination"
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
- destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
name: BigQuery
dockerRepository: airbyte/destination-bigquery
dockerImageTag: 0.1.10
dockerImageTag: 0.1.11
documentationUrl: https://hub.docker.com/r/airbyte/integration-singer-bigquery-destination
- destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
name: Snowflake
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar

RUN tar xf ${APPLICATION}.tar --strip-components=1

LABEL io.airbyte.version=0.1.10
LABEL io.airbyte.version=0.1.11
LABEL io.airbyte.name=airbyte/destination-bigquery
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,10 @@
import io.airbyte.scheduler.persistence.JobPersistence;
import java.io.IOException;
import java.time.Instant;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -52,24 +53,35 @@ public JobRetrier(JobPersistence jobPersistence, Supplier<Instant> timeSupplier)
public void run() {
LOGGER.info("Running job-retrier...");

listFailedJobs()
final AtomicInteger failedJobs = new AtomicInteger();
final AtomicInteger retriedJobs = new AtomicInteger();
final List<Job> incompleteJobs = incompleteJobs();

incompleteJobs
.forEach(job -> {
LOGGER.info("weeee");
if (hasReachedMaxAttempt(job)) {
failJob(job);
failedJobs.incrementAndGet();
return;
}

if (shouldRetry(job)) {
retriedJobs.incrementAndGet();
resetJob(job);
}
});

LOGGER.info("Completed job-retrier...");
LOGGER.info("Completed Job-Retrier...");
LOGGER.info("Job-Retrier Summary. Incomplete jobs: {}, Job set to retry: {}, Jobs set to failed: {}",
incompleteJobs.size(),
failedJobs.get(),
retriedJobs.get());
}

private Stream<Job> listFailedJobs() {
private List<Job> incompleteJobs() {
try {
return persistence.listJobsWithStatus(JobConfig.ConfigType.SYNC, JobStatus.INCOMPLETE).stream();
return persistence.listJobsWithStatus(JobConfig.ConfigType.SYNC, JobStatus.INCOMPLETE);
} catch (IOException e) {
throw new RuntimeException("failed to fetch failed jobs", e);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;
import org.slf4j.Logger;
Expand Down Expand Up @@ -80,21 +81,25 @@ public void run() {

scheduleSyncJobs();

LOGGER.info("Completed job-scheduler...");
LOGGER.info("Completed Job-Scheduler...");
} catch (Throwable e) {
LOGGER.error("Job Scheduler Error", e);
}
}

private void scheduleSyncJobs() throws IOException {
for (StandardSync connection : getAllActiveConnections()) {
final AtomicInteger jobsScheduled = new AtomicInteger();
final List<StandardSync> activeConnections = getAllActiveConnections();

for (StandardSync connection : activeConnections) {
final Optional<Job> previousJobOptional = jobPersistence.getLastSyncJob(connection.getConnectionId());
final StandardSyncSchedule standardSyncSchedule = getStandardSyncSchedule(connection);

if (scheduleJobPredicate.test(previousJobOptional, standardSyncSchedule)) {
jobFactory.create(connection.getConnectionId());
}
}
LOGGER.info("Job-Scheduler Summary. Active connections: {}, Jobs scheduler: {}", activeConnections.size(), jobsScheduled.get());
}

private StandardSyncSchedule getStandardSyncSchedule(StandardSync connection) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,10 @@ public void run() {
oldestPendingJob.ifPresent(job -> {
trackSubmission(job);
submitJob(job);
LOGGER.info("Job-Submitter Summary. Submitted job with scope {}", job.getScope());
});

LOGGER.info("Completed job-submitter...");
LOGGER.info("Completed Job-Submitter...");
} catch (Throwable e) {
LOGGER.error("Job Submitter Error", e);
}
Expand Down
1 change: 1 addition & 0 deletions docs/integrations/destinations/bigquery.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ Each stream will be output into its own table in BigQuery. Each table will conta
| Feature | Supported?\(Yes/No\) | Notes |
| :--- | :--- | :--- |
| Full Refresh Sync | Yes | |
| Incremental - Append Sync | Yes | |

## Getting started

Expand Down
1 change: 1 addition & 0 deletions docs/integrations/destinations/local-csv.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Each stream will be output into its own file. Each file will contain 3 columns:
| Feature | Supported |
| :--- | :--- |
| Full Refresh Sync | Yes |
| Incremental - Append Sync | Yes |

#### Performance considerations

Expand Down
1 change: 1 addition & 0 deletions docs/integrations/destinations/postgres.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ Each stream will be output into its own table in Postgres. Each table will conta
| Feature | Supported?\(Yes/No\) | Notes |
| :--- | :--- | :--- |
| Full Refresh Sync | Yes | |
| Incremental - Append Sync | Yes | |

## Getting started

Expand Down
1 change: 1 addition & 0 deletions docs/integrations/destinations/redshift.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ Each stream will be output into its own raw table in Redshift. Each table will c
| Feature | Supported?\(Yes/No\) | Notes |
| :--- | :--- | :--- |
| Full Refresh Sync | Yes | |
| Incremental - Append Sync | Yes | |

#### Target Database

Expand Down
2 changes: 2 additions & 0 deletions docs/integrations/destinations/snowflake.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ Each stream will be output into its own table in Snowflake. Each table will cont
| Feature | Supported?\(Yes/No\) | Notes |
| :--- | :--- | :--- |
| Full Refresh Sync | Yes | |
| Incremental - Append Sync | Yes | |


## Getting started

Expand Down
2 changes: 1 addition & 1 deletion docs/integrations/sources/intercom.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ If there are more endpoints you'd like Airbyte to support, please [create an iss
| Feature | Supported? |
| :--- | :--- |
| Full Refresh Sync | Yes |
| Incremental Sync | No |
| Incremental - Append Sync | Yes |
| Replicate Incremental Deletes | No |
| SSL connection | Yes |

Expand Down
2 changes: 1 addition & 1 deletion docs/integrations/sources/mixpanel.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ If there are more endpoints you'd like Airbyte to support, please [create an iss
| Feature | Supported? |
| :--- | :--- |
| Full Refresh Sync | Yes |
| Incremental Sync | No |
| Incremental - Append Sync | Yes |
| Replicate Incremental Deletes | No |
| SSL connection | Yes |

Expand Down
2 changes: 1 addition & 1 deletion docs/integrations/sources/mysql.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ If you do not see a type in this list, assume that it is coerced into a string.
| Feature | Supported |
| :--- | :--- |
| Full Refresh Sync | Yes |
| Incremental Sync | No |
| Incremental - Append Sync | Yes |
| Replicate Incremental Deletes | No |
| Logical Replication \(WAL\) | No |
| SSL Support | Yes |
Expand Down
2 changes: 1 addition & 1 deletion docs/integrations/sources/postgres.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ Postgres data types are mapped to the following data types when synchronizing da
| Feature | Supported |
| :--- | :--- |
| Full Refresh Sync | Yes |
| Incremental Sync | No |
| Incremental - Append Sync | Yes |
| Replicate Incremental Deletes | No |
| Logical Replication \(WAL\) | No |
| SSL Support | Yes |
Expand Down
2 changes: 1 addition & 1 deletion docs/integrations/sources/twilio.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ This Source is capable of syncing the following core Streams:
| Feature | Supported?\(Yes/No\) | Notes |
| :--- | :--- | :--- |
| Full Refresh Sync | yes | |
| Incremental Sync | no | |
| Incremental - Append Sync | Yes | |

### Performance considerations

Expand Down

0 comments on commit 01cdeaf

Please sign in to comment.