Merged

34 commits
d5398fd
adds dependabot management for dockerfiles
thoniTUB Dec 10, 2025
c9a2342
Merge branch 'develop' into feat/dependabot-dockerfile
thoniTUB Dec 10, 2025
eeedd8f
feat: adds dependabot management for dockerfiles (#3817)
thoniTUB Dec 10, 2025
579982c
update matching-stats: don't resolve unneeded objects
thoniTUB Dec 10, 2025
0ee5440
Bump actions/cache from 4 to 5
dependabot[bot] Dec 15, 2025
ab380e4
Bump actions/cache from 4 to 5 (#3825)
thoniTUB Dec 15, 2025
990d01a
Merge branch 'develop' into feat/improve-matching-stats
thoniTUB Dec 15, 2025
ccc3ad9
Bump alpine/git from v2.49.1 to 2.52.0
dependabot[bot] Dec 22, 2025
1577083
Bump qs, express and body-parser in /frontend
dependabot[bot] Jan 1, 2026
c336f2c
Bump qs, express and body-parser in /frontend (#3828)
thoniTUB Jan 5, 2026
1817b4e
Merge branch 'develop' into dependabot/docker/alpine/git-2.52.0
thoniTUB Jan 5, 2026
8a267e9
Merge branch 'develop' into feat/improve-matching-stats
thoniTUB Jan 5, 2026
c9a596f
Bump alpine/git from v2.49.1 to 2.52.0 (#3827)
thoniTUB Jan 5, 2026
411e14e
Merge branch 'develop' into feat/improve-matching-stats
thoniTUB Jan 6, 2026
7153d30
fix: update matching-stats: don't resolve unneeded objects (#3824)
thoniTUB Jan 6, 2026
3f9ef7b
Bump react-router and react-router-dom in /frontend
dependabot[bot] Jan 9, 2026
399d016
Bump react-router and react-router-dom in /frontend (#3829)
thoniTUB Jan 12, 2026
be99271
Bump qs and @cypress/request
dependabot[bot] Jan 12, 2026
ee186d1
Bump qs and @cypress/request (#3830)
thoniTUB Jan 12, 2026
db07088
adds listener to ShardResult submission to catch errors
thoniTUB Jan 19, 2026
542e86f
truncate trace
thoniTUB Jan 19, 2026
3c770fc
catch errors in mina filter chain
thoniTUB Jan 20, 2026
d227075
clean up result and failure handlers
thoniTUB Jan 20, 2026
f22f485
remove unused method
thoniTUB Jan 20, 2026
94e9e52
Apply suggestions from code review
thoniTUB Jan 20, 2026
047b6c9
fix: adds listener to ShardResult submission to catch errors (#3831)
thoniTUB Jan 20, 2026
c843b97
Bump lodash from 4.17.21 to 4.17.23
dependabot[bot] Jan 21, 2026
8bd4631
Bump lodash from 4.17.21 to 4.17.23 (#3833)
thoniTUB Jan 22, 2026
c261870
Bump cypress-io/github-action from 6 to 7
dependabot[bot] Jan 26, 2026
69db5da
Bump cypress-io/github-action from 6 to 7 (#3836)
thoniTUB Jan 26, 2026
86b1247
deduplicates loaded entities for entity-history Navigation (#3838)
awildturtok Feb 2, 2026
87b3f0e
removes time-editor to use editorv2 by default (#3837)
awildturtok Feb 4, 2026
3451a8d
Merge branch 'master' into release
thoniTUB Feb 5, 2026
ede81eb
add token to silently failing action
thoniTUB Feb 5, 2026
17 changes: 13 additions & 4 deletions .github/dependabot.yml
@@ -3,8 +3,7 @@ updates:
   - package-ecosystem: maven
     directory: "/"
     schedule:
-      interval: daily
-      time: "04:00"
+      interval: "weekly"
     open-pull-requests-limit: 10
     assignees:
       - awildturtok
@@ -16,8 +15,7 @@ updates:
   - package-ecosystem: npm
     directory: "/frontend"
     schedule:
-      interval: daily
-      time: "04:00"
+      interval: "weekly"
    open-pull-requests-limit: 0
     assignees:
       - Kadrian
@@ -34,3 +32,14 @@ updates:
     labels:
       - gh-actions
       - dependencies
+  - package-ecosystem: "docker"
+    # Should match both dockerfiles according to https://github.com/dependabot/dependabot-core/commit/4d797fd82bbc9e99c80c0deea72ab698ceb64320
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    assignees:
+      - awildturtok
+      - thoniTUB
+    labels:
+      - docker
+      - dependencies
2 changes: 2 additions & 0 deletions .github/workflows/prepare-merge-release.yml
@@ -20,6 +20,8 @@ jobs:
         id: check_pr_count
         run: >
           echo "pr_count=$(gh pr list --head release --limit 1 --base $MAIN_BRANCH_NAME --state open --json state --jq 'length')" >> "$GITHUB_OUTPUT"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

       - name: Create pull-request
         if: ${{ steps.check_pr_count.outputs.pr_count == 0 }}
2 changes: 1 addition & 1 deletion .github/workflows/release_backend.yml
@@ -25,7 +25,7 @@ jobs:
           java-version: '21'
           overwrite-settings: false
       - name: Cache local Maven repository
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: ~/.m2/repository
           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
2 changes: 1 addition & 1 deletion .github/workflows/run_autodoc.yml
@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 10
     steps:
       - name: Cache local Maven repository
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: ~/.m2/repository
           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
4 changes: 2 additions & 2 deletions .github/workflows/test_backend.yml
@@ -20,7 +20,7 @@ jobs:
         with:
           submodules: true
       - name: Cache local Maven repository
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: ~/.m2/repository
           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
@@ -56,7 +56,7 @@ jobs:
         with:
           submodules: true
       - name: Cache local Maven repository
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: ~/.m2/repository
           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
4 changes: 2 additions & 2 deletions .github/workflows/test_cypress.yml
@@ -20,7 +20,7 @@ jobs:
     timeout-minutes: 10
     steps:
       - name: Cache local Maven repository
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: ~/.m2/repository
           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
@@ -52,7 +52,7 @@ jobs:
       - name: Cypress run
         # This is a preconfigured action, maintained by cypress, to run e2e tests
         # https://github.com/cypress-io/github-action
-        uses: cypress-io/github-action@v6
+        uses: cypress-io/github-action@v7
         with:
           working-directory: .
           start: bash ./scripts/run_e2e_all.sh
2 changes: 1 addition & 1 deletion backend.Dockerfile
@@ -1,5 +1,5 @@
 # Version Extractor
-FROM alpine/git:v2.49.1 AS version-extractor
+FROM alpine/git:2.52.0 AS version-extractor

 WORKDIR /app
 COPY .git .
@@ -37,14 +37,14 @@
 RUN ln -s /usr/lib/libfontconfig.so.1 /usr/lib/libfontconfig.so && \
     ln -s /lib/libuuid.so.1 /usr/lib/libuuid.so.1 && \
     ln -s /lib/libc.musl-x86_64.so.1 /usr/lib/libc.musl-x86_64.so.1
 ENV LD_LIBRARY_PATH /usr/lib

GitHub Actions warning (backend.Dockerfile, line 40): LegacyKeyValueFormat: "ENV key=value" should be used instead of the legacy "ENV key value" format. More info: https://docs.docker.com/go/dockerfile/rule/legacy-key-value-format/

 WORKDIR /app
 COPY --from=builder /app/executable/target/executable*jar ./conquery.jar

 ENV CLUSTER_PORT=${CLUSTER_PORT:-8082}
 ENV ADMIN_PORT=${ADMIN_PORT:-8081}
 ENV API_PORT=${API_PORT:-8080}

GitHub Actions warnings (backend.Dockerfile, lines 45-47): UndefinedVar: usage of undefined variables '$CLUSTER_PORT', '$ADMIN_PORT' and '$API_PORT'. More info: https://docs.docker.com/go/dockerfile/rule/undefined-var/

 RUN mkdir /app/logs
 VOLUME /app/logs
@@ -20,6 +20,7 @@
 import com.bakdata.conquery.models.worker.Worker;
 import com.bakdata.conquery.models.worker.WorkerInformation;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Throwables;
 import io.dropwizard.core.setup.Environment;
 import io.dropwizard.lifecycle.Managed;
 import io.dropwizard.util.Duration;
@@ -174,8 +175,10 @@ public void sessionIdle(IoSession session, IdleStatus status) {
     }

     @Override
-    public void exceptionCaught(IoSession session, Throwable cause) {
-        log.error("Exception caught", cause);
+    public void exceptionCaught(IoSession session, Throwable cause) throws Exception {
+        // Rethrow
+        Throwables.throwIfInstanceOf(cause, Exception.class);
+        throw new RuntimeException("Encountered problem in %s".formatted(session.toString()), cause);
     }

     @Override
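Editor's note: the change above stops swallowing errors in the MINA handler. `exceptionCaught` now declares `throws Exception` and rethrows, so failures propagate through the filter chain (and on to the new ShardResult submission listener) instead of only being logged. Guava's `Throwables.throwIfInstanceOf` rethrows the cause unchanged when it already is a checked `Exception`; everything else is wrapped with session context. A minimal sketch of that pattern outside any Conquery types (class and method names here are illustrative):

import com.google.common.base.Throwables;

public class RethrowSketch {

    // Same shape as the handler above: Exceptions pass through unchanged,
    // anything else (Error, bare Throwable) is wrapped with context.
    static void rethrow(String sessionDescription, Throwable cause) throws Exception {
        Throwables.throwIfInstanceOf(cause, Exception.class);
        throw new RuntimeException("Encountered problem in %s".formatted(sessionDescription), cause);
    }

    public static void main(String[] args) {
        try {
            rethrow("session[42]", new IllegalStateException("boom"));
        }
        catch (Exception e) {
            // Prints the original exception, not a wrapper:
            System.out.println(e);
        }
    }
}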
@@ -246,6 +246,19 @@ public String getMessageTemplate(ErrorMessages errorMessages) {
         }
     }

+    /**
+     * Result size was too large
+     */
+    @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_RESULT_SIZE")
+    @RequiredArgsConstructor(onConstructor_ = {@JsonCreator})
+    public static class ExecutionProcessingResultSizeError extends ConqueryError {
+
+        @Override
+        public String getMessageTemplate(ErrorMessages errorMessages) {
+            return errorMessages.resultSizeTooLarge();
+        }
+    }
+
     @CPSType(base = ConqueryError.class, id = "CQ_EXECUTION_NO_SECONDARY_ID")
     @RequiredArgsConstructor(onConstructor_ = {@JsonCreator})
     public static class NoSecondaryIdSelectedError extends ConqueryError {
@@ -53,6 +53,10 @@ public interface ErrorMessages {
     @De("Die Anfrage lief zu lange und wurde abgebrochen.")
     String executionTimeout();

+    @En("Query result was too large.")
+    @De("Das Anfrageergebnis ist zu groß.")
+    String resultSizeTooLarge();
+
     @En("No secondaryId could be selected.")
     @De("Die ausgewählte Analyseebenen konnte in keinem der ausgewählten Konzepten gefunden werden.")
     String noSecondaryIdSelected();
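Editor's note: the two hunks above add a localized "result too large" error. `ExecutionProcessingResultSizeError` is registered under the CPSType id `CQ_EXECUTION_RESULT_SIZE`, and its message resolves through the new `ErrorMessages.resultSizeTooLarge()` in English or German. The diff adds only the error type, not a call site; the guard below is a hypothetical sketch (`ResultSizeGuard` and `maxResultSize` are assumptions, not part of this PR):

import java.util.List;

import com.bakdata.conquery.models.error.ConqueryError;

// Hypothetical guard, assuming the conquery backend on the classpath.
class ResultSizeGuard {

    private final long maxResultSize; // illustrative limit, e.g. from configuration

    ResultSizeGuard(long maxResultSize) {
        this.maxResultSize = maxResultSize;
    }

    <T> void checkSize(List<T> resultRows) {
        if (resultRows.size() > maxResultSize) {
            // Serialized with CPSType id "CQ_EXECUTION_RESULT_SIZE"; the user sees
            // the localized message from ErrorMessages.resultSizeTooLarge().
            throw new ConqueryError.ExecutionProcessingResultSizeError();
        }
    }
}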
@@ -61,9 +61,8 @@ public void react(Worker worker) throws Exception {
             query.createQueryPlan(new QueryPlanContext(worker.getStorage(), queryExecutor.getSecondaryIdSubPlanLimit()));
         }
         catch (Exception e) {
-            ConqueryError err = asConqueryError(e);
-            log.warn("Failed to create query plans for {}.", formId, err);
-            queryExecutor.sendFailureToManagerNode(result, err, worker);
+            log.warn("Failed to create query plans for {}.", formId, e);
+            queryExecutor.sendFailureToManagerNode(e, formId);
             return;
         }

@@ -56,9 +56,8 @@ public void react(Worker worker) throws Exception {
             log.trace("Created query plan in {}", stopwatch);
         }
         catch (Exception e) {
-            ConqueryError err = asConqueryError(e);
-            log.warn("Failed to create query plans for {}.", id, err);
-            queryExecutor.sendFailureToManagerNode(result, err, worker);
+            log.warn("Failed to create query plans for {}.", id, e);
+            queryExecutor.sendFailureToManagerNode(e, id);
             return;
         }

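Editor's note: both handlers above now pass the raw exception and the execution id to `sendFailureToManagerNode` instead of wrapping with `asConqueryError` at every call site. The new two-argument signature suggests the conversion moved into the executor; a minimal sketch of that idea, assuming details this diff does not show (`FailureMessage` and `notifyManager` are invented stand-ins for the real transport):

// Sketch only: the actual QueryExecutor change is not part of this diff.
class FailureForwardingSketch {

    record FailureMessage(Object executionId, Exception error) {}

    void sendFailureToManagerNode(Exception e, Object executionId) {
        // Normalize once, centrally, instead of at each call site
        // (the real code presumably converts to ConqueryError here).
        notifyManager(new FailureMessage(executionId, e));
    }

    void notifyManager(FailureMessage message) {
        // Placeholder: the real implementation writes to the manager's network session.
        System.out.println("forwarding " + message);
    }
}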
@@ -19,6 +19,7 @@
 import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept;
 import com.bakdata.conquery.models.events.Bucket;
 import com.bakdata.conquery.models.events.CBlock;
+import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId;
 import com.bakdata.conquery.models.jobs.Job;
@@ -73,13 +74,9 @@ public void execute() throws Exception {
         subJobs =
                 concepts.stream()
                         .collect(Collectors.toMap(Functions.identity(),
-                                  concept -> CompletableFuture.runAsync(() -> {
-                                      final Concept<?> resolved = concept.resolve();
-                                      final Map<ConceptElementId<?>, MatchingStats.Entry>
-                                              matchingStats =
-                                              new HashMap<>(resolved.countElements());
+                                  conceptId -> CompletableFuture.runAsync(() -> {

-                                      calculateConceptMatches(resolved, matchingStats, worker);
+                                      Map<ConceptElementId<?>, MatchingStats.Entry> matchingStats = calculateConceptMatches(conceptId, worker);

                                       final WriteFuture writeFuture = worker.send(new UpdateElementMatchingStats(worker.getInfo().getId(), matchingStats));

@@ -127,18 +124,24 @@ public String getLabel() {
         return String.format("Calculate Matching Stats for %s", worker.getInfo().getDataset());
     }

-    private static void calculateConceptMatches(Concept<?> concept, Map<ConceptElementId<?>, MatchingStats.Entry> results, Worker worker) {
-        log.debug("BEGIN calculating for `{}`", concept.getId());
+    private static Map<ConceptElementId<?>, MatchingStats.Entry> calculateConceptMatches(ConceptId conceptId, Worker worker) {

-        try(Stream<CBlock> allCBlocks = worker.getStorage().getAllCBlocks()) {
+        Concept<?> concept = conceptId.resolve();
+
+        final Map<ConceptElementId<?>, MatchingStats.Entry> matchingStats = new HashMap<>(concept.countElements());
+
+        log.debug("BEGIN calculating for `{}`", conceptId);
+
+        try (Stream<CBlockId> allCBlocks = worker.getStorage().getAllCBlockIds()) {

-            for (CBlock cBlock : allCBlocks.toList()) {
+            for (CBlockId cBlockId : allCBlocks.toList()) {

-                if (!cBlock.getConnector().getConcept().equals(concept.getId())) {
+                if (!cBlockId.getConnector().getConcept().equals(conceptId)) {
                     continue;
                 }

                 try {
+                    CBlock cBlock = cBlockId.resolve();
                     final Bucket bucket = cBlock.getBucket().resolve();
                     final Table table = bucket.getTable().resolve();

@@ -152,7 +155,7 @@ private static void calculateConceptMatches(Concept<?> concept, Map<ConceptElementId<?>, MatchingStats.Entry> results, Worker worker) {


                     if (!(concept instanceof TreeConcept) || localIds == null) {
-                        results.computeIfAbsent(concept.getId(), (ignored) -> new MatchingStats.Entry()).addEvent(table, bucket, event, entity);
+                        matchingStats.computeIfAbsent(conceptId, (ignored) -> new MatchingStats.Entry()).addEvent(table, bucket, event, entity);
                         continue;
                     }

@@ -163,24 +166,24 @@ private static void calculateConceptMatches(Concept<?> concept, Map<ConceptElementId<?>, MatchingStats.Entry> results, Worker worker) {
                     ConceptElement<?> element = ((TreeConcept) concept).getElementByLocalIdPath(localIds);

                     while (element != null) {
-                        results.computeIfAbsent(element.getId(), (ignored) -> new MatchingStats.Entry())
-                               .addEvent(table, bucket, event, entity);
+                        matchingStats.computeIfAbsent(element.getId(), (ignored) -> new MatchingStats.Entry())
+                                     .addEvent(table, bucket, event, entity);
                         element = element.getParent();
                     }
                 }
             }

             }
             catch (Exception e) {
-                log.error("Failed to collect the matching stats for {}", cBlock, e);
+                log.error("Failed to collect the matching stats for {}", cBlockId, e);
             }
         }
     }

-        log.trace("DONE calculating for `{}`", concept.getId());
+        log.trace("DONE calculating for `{}`", conceptId);

+        return matchingStats;
     }

 }


 }
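Editor's note: this is the heart of #3824. The job now streams lightweight `CBlockId`s, filters on the id (`cBlockId.getConnector().getConcept().equals(conceptId)`) before calling `resolve()`, and so never loads CBlocks that belong to other concepts. The same id-first pattern in isolation, with all names illustrative:

import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;

class FilterBeforeResolve {

    record BlockId(String conceptId) {} // cheap identifier
    record Block(BlockId id) {}         // expensive, storage-backed object

    // Filter on the identifier first; resolve (load) only the survivors.
    static List<Block> load(Stream<BlockId> ids, String conceptId, Function<BlockId, Block> resolve) {
        return ids.filter(id -> id.conceptId().equals(conceptId)) // no I/O here
                  .map(resolve)                                   // load only the matches
                  .toList();
    }
}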
@@ -85,15 +85,19 @@ public void doCancelQuery(ManagedExecutionId executionId) {
     @SneakyThrows
     public <R extends ShardResult, E extends ManagedExecution & InternalExecution> void handleQueryResult(R result, E execution) {

-
-        log.debug("Received Result[size={}] for Query[{}]", result.getResults().size(), result.getQueryId());
-        log.trace("Received Result\n{}", result.getResults());
-
         if (execution == null) {
             log.debug("Ignoring result {} because the corresponding execution was 'null' (probably deleted)", result);
             return;
         }

+        if (result.getError() != null) {
+            execution.fail(result.getError());
+            return;
+        }
+
+        log.debug("Received Result[size={}] for Query[{}]", result.getResults().size(), result.getExecutionId());
+        log.trace("Received Result\n{}", result.getResults());
+
         Optional<ExecutionInfo> optInfo = tryGetExecutionInfo(execution.getId());

         if (optInfo.isEmpty()) {
@@ -114,18 +118,13 @@ public <R extends ShardResult, E extends ManagedExecution & InternalExecution> void handleQueryResult(R result, E execution) {
             return;
         }

-        if (result.getError().isPresent()) {
-            execution.fail(result.getError().get());
-        }
-        else {
-            distributedInfo.addShardResult(result);
+        distributedInfo.addShardResult(result);

-            // If all known workers have returned a result, the query is DONE.
-            if (distributedInfo.allResultsArrived(getWorkerHandler(execution.getDataset()).getAllWorkerIds())) {
+        // If all known workers have returned a result, the query is DONE.
+        if (distributedInfo.allResultsArrived(getWorkerHandler(execution.getDataset()).getAllWorkerIds())) {

-                execution.finish(ExecutionState.DONE);
-            }
-        }
+            execution.finish(ExecutionState.DONE);
+        }

         // State changed to DONE or FAILED
@@ -139,7 +138,7 @@ public <R extends ShardResult, E extends ManagedExecution & InternalExecution> void handleQueryResult(R result, E execution) {

         /* This log is here to prevent an NPE which could occur when no strong reference to result.getResults()
          existed anymore after the query finished and immediately was reset */
-        log.trace("Collected metrics for execution {}. Last result received: {}:", result.getQueryId(), result.getResults());
+        log.trace("Collected metrics for execution {}. Last result received: {}:", result.getExecutionId(), result.getResults());
     }

 }
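Editor's note: `handleQueryResult` is flattened into guard clauses: drop results whose execution was deleted, fail fast on a shard error (`getError()` now returns a nullable value instead of an `Optional`), and only then log and aggregate; `getQueryId()` was also renamed to `getExecutionId()`. The resulting control flow, reduced to a sketch with simplified types (these are not the real Conquery signatures):

import java.util.Set;

class ResultHandlingSketch {

    interface Execution {
        void fail(Exception error);
        void finish(String state);
    }

    interface ShardResult {
        Exception getError(); // nullable now, instead of Optional
    }

    interface DistributedInfo {
        void addShardResult(ShardResult result);
        boolean allResultsArrived(Set<String> workers);
    }

    DistributedInfo distributedInfo;
    Set<String> allWorkerIds;

    void handleQueryResult(ShardResult result, Execution execution) {
        if (execution == null) {
            return;                            // execution was deleted; ignore the late result
        }
        if (result.getError() != null) {
            execution.fail(result.getError()); // fail fast, before any aggregation
            return;
        }
        distributedInfo.addShardResult(result);
        // Once every known worker has reported, the execution is DONE.
        if (distributedInfo.allResultsArrived(allWorkerIds)) {
            execution.finish("DONE");
        }
    }
}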