
Improved fix for #45
Aklakan committed May 3, 2014
1 parent 34f40bf commit e0841dc
Showing 3 changed files with 101 additions and 78 deletions.
@@ -494,7 +494,7 @@ public boolean isEmpty() {
 
         // Check if the query is a count query without grouping, because then it's not empty after all
         int argCount = projection.getNames().size();
-        if(argCount == 1 && groupByExprs.isEmpty()) {
+        if(result && (argCount == 1 && groupByExprs.isEmpty())) {
            boolean hasCount = containsCount(projection);
 
            result = !hasCount;
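
The effect of the added result && guard, shown as a minimal standalone sketch (the parameter names below are stand-ins, not the actual Sparqlify members): a count query without GROUP BY always returns exactly one row, so this special case should only be able to downgrade a verdict of "empty" to "not empty". Without the guard, result = !hasCount could also overwrite a "not empty" verdict computed earlier in the method.

    import java.util.List;

    // Sketch only: 'emptySoFar' models the 'result' computed earlier in
    // isEmpty(); 'hasCount' models containsCount(projection).
    class CountCheckSketch {
        static boolean isEmpty(boolean emptySoFar, List<String> projectionNames,
                List<?> groupByExprs, boolean hasCount) {
            boolean result = emptySoFar;
            // Only reconsider a query that currently looks empty: a count
            // query without grouping still yields a single row.
            if (result && projectionNames.size() == 1 && groupByExprs.isEmpty()) {
                result = !hasCount;
            }
            return result;
        }
    }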
@@ -1034,100 +1034,111 @@ public Mapping joinCommon(Mapping a, Mapping initB, boolean isLeftJoin) {
         Set<Var> varsAOnly = Sets.difference(varsA, commonVars);
         Set<Var> varsBOnly = Sets.difference(varsB, commonVars);
 
-        Multimap<Var, RestrictedExpr> newVarDef = HashMultimap.create();
         Set<Var> varsAll = Sets.union(varsA, varsB);
 
-        // Add definitions of A only
-        for(Var varA : varsAOnly) {
-            Collection<RestrictedExpr> exprs = vdA.getMap().get(varA);
-            newVarDef.putAll(varA, exprs);
-        }
-
-        // Add definitions of B only
-        for(Var varB : varsBOnly) {
-            Collection<RestrictedExpr> exprs = vdB.getMap().get(varB);
-            newVarDef.putAll(varB, exprs);
-        }
-
-        // Add common definitions
-        Set<SqlExpr> joinCondition = new HashSet<SqlExpr>();
-
-        boolean isJoinConditionSatisfiable = true;
-        for(Var commonVar : commonVars) {
-            Collection<RestrictedExpr> defsA = a.getVarDefinition().getDefinitions(commonVar);
-            Collection<RestrictedExpr> defsB = b.getVarDefinition().getDefinitions(commonVar);
-
-            VarDefKey ors = joinDefinitionsOnEquals(defsA, defsB, typeMap, sqlTranslator);
-
-            if(isLeftJoin) {
-                newVarDef.putAll(commonVar, defsA);
-            }
-
-            if(ors == null) {
-                // Bail out on unsatisfiable join condition
-                isJoinConditionSatisfiable = false;
-
-                if(!isLeftJoin) {
-                    newVarDef.put(commonVar, new RestrictedExpr(NodeValue.nvNothing));
-                }
-            }
-            else {
-                if(!isLeftJoin) {
-                    newVarDef.putAll(commonVar, ors.definitionExprs);
-                }
-            }
-
-            if(isJoinConditionSatisfiable) {
-                // Stop collecting join conditions if they are unsatisfiable anyway
-
-                // Don't bother adding TRUE conditions
-                SqlExpr or = SqlExprUtils.orifyBalanced(ors.constraintExpr);
-                if(or == null || or.equals(S_Constant.TRUE)) {
-                    continue;
-                }
-
-                //joinCondition.addAll(ors.constraintExpr);
-                joinCondition.add(or);
-            }
-        }
-
-        SqlOp resultSqlOp;
-        if(!isJoinConditionSatisfiable) {
-            //newVarDef.clear();
-
-            if(isLeftJoin) {
-                resultSqlOp = a.getSqlOp();
-            }
-            else {
-                resultSqlOp = SqlOpEmpty.create(opJoin.getSchema());
-            }
-        }
-        else {
-            List<SqlExpr> jc = new ArrayList<SqlExpr>(joinCondition);
-
-            if(joinType.equals(JoinType.LEFT)) {
-                opJoin.getConditions().addAll(jc);
-                resultSqlOp = opResult;
-            } else {
-                //ExprList jc = new ExprList(new ArrayList<Expr>(joinCondition));
-                resultSqlOp = SqlOpFilter.create(opResult, jc);
-            }
-        }
+        Multimap<Var, RestrictedExpr> commonVarDef = HashMultimap.create();
+
+        //Multimap<Var, RestrictedExpr> newVarDef = HashMultimap.create();
+        // Try to collect join conditions - this may 'fail' if a condition is unsatisfiable
+        Set<SqlExpr> joinCondition = new HashSet<SqlExpr>();
+
+        boolean isJoinConditionSatisfiable = true;
+        for(Var commonVar : commonVars) {
+            Collection<RestrictedExpr> defsA = a.getVarDefinition().getDefinitions(commonVar);
+            Collection<RestrictedExpr> defsB = b.getVarDefinition().getDefinitions(commonVar);
+
+            VarDefKey ors = joinDefinitionsOnEquals(defsA, defsB, typeMap, sqlTranslator);
+
+            if(ors == null) {
+                // Bail out on unsatisfiable join condition
+                isJoinConditionSatisfiable = false;
+                break;
+            }
+
+            commonVarDef.putAll(commonVar, ors.definitionExprs);
+
+            // Don't bother adding TRUE conditions
+            SqlExpr or = SqlExprUtils.orifyBalanced(ors.constraintExpr);
+            if(or == null || or.equals(S_Constant.TRUE)) {
+                continue;
+            }
+
+            //joinCondition.addAll(ors.constraintExpr);
+            joinCondition.add(or);
+        }
+
+        /// Process the variable definitions
+
+        Multimap<Var, RestrictedExpr> newVarDef = HashMultimap.create();
+        SqlOp resultSqlOp;
+
+        List<SqlExpr> jc = new ArrayList<SqlExpr>(joinCondition);
+
+        // Add the var defs of the left hand side
+        if(isLeftJoin) {
+            // Add all vardefs of the left hand side
+            for(Var varA : varsA) {
+                Collection<RestrictedExpr> exprs = vdA.getMap().get(varA);
+                newVarDef.putAll(varA, exprs);
+            }
+
+            if(isJoinConditionSatisfiable) {
+                for(Var varB : varsBOnly) {
+                    Collection<RestrictedExpr> exprs = vdB.getMap().get(varB);
+                    newVarDef.putAll(varB, exprs);
+                }
+
+                opJoin.getConditions().addAll(jc);
+                resultSqlOp = opResult;
+            }
+            else {
+                // Add definitions of B only
+                // for(Var varB : varsBOnly) {
+                //     newVarDef.put(varB, new RestrictedExpr(E_RdfTerm.TYPE_ERROR));
+                // }
+
+                resultSqlOp = a.getSqlOp();
+            }
+        }
+        else {
+            if(isJoinConditionSatisfiable) {
+                // Add common definitions
+                newVarDef.putAll(commonVarDef);
+
+                for(Var varA : varsAOnly) {
+                    Collection<RestrictedExpr> exprs = vdA.getMap().get(varA);
+                    newVarDef.putAll(varA, exprs);
+                }
+
+                // Add definitions of B only
+                for(Var varB : varsBOnly) {
+                    Collection<RestrictedExpr> exprs = vdB.getMap().get(varB);
+                    newVarDef.putAll(varB, exprs);
+                }
+
+                resultSqlOp = SqlOpFilter.create(opResult, jc);
+            }
+            else {
+                // for(Var var : varsAll) {
+                //     newVarDef.put(var, new RestrictedExpr(E_RdfTerm.TYPE_ERROR));
+                // }
+
+                resultSqlOp = SqlOpEmpty.create(opJoin.getSchema());
+            }
+        }
 
         VarDefinition newVarDefinition = new VarDefinition(newVarDef);
 
         // TODO Minimize the schema to only the referenced columns
         //List<String> refs = newVarDefinition.getReferencedNames();
 
         Mapping result = new Mapping(newVarDefinition, resultSqlOp);
 
         return result;
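
The restructuring above separates two phases that the old code interleaved: first collect an equality-based join condition for every shared variable, bailing out with break as soon as one is unsatisfiable, and only then decide which variable definitions and which SQL operator survive. A hedged sketch of that two-phase control flow, with plain strings standing in for Sparqlify's Var, RestrictedExpr and SqlOp types:

    import java.util.*;

    // Sketch only: equalityByVar maps each shared variable to a join
    // condition, with a null value modeling an unsatisfiable condition.
    public class JoinSketch {

        enum Result { INNER_JOIN_FILTER, LEFT_JOIN_WITH_CONDITION, LEFT_SIDE_ONLY, EMPTY }

        static Result joinCommon(Set<String> commonVars,
                                 Map<String, String> equalityByVar,
                                 boolean isLeftJoin) {
            List<String> joinCondition = new ArrayList<>();

            // Phase 1: collect join conditions; an unsatisfiable one ends the loop.
            boolean satisfiable = true;
            for (String v : commonVars) {
                String cond = equalityByVar.get(v);
                if (cond == null) { satisfiable = false; break; }
                joinCondition.add(cond);
            }

            // Phase 2: pick the result operator from the two flags.
            if (isLeftJoin) {
                // The left side always survives; an unsatisfiable condition
                // just degrades the left join to the left operand itself.
                return satisfiable ? Result.LEFT_JOIN_WITH_CONDITION : Result.LEFT_SIDE_ONLY;
            } else {
                // An inner join with an unsatisfiable condition is empty.
                return satisfiable ? Result.INNER_JOIN_FILTER : Result.EMPTY;
            }
        }

        public static void main(String[] args) {
            Map<String, String> eq = new HashMap<>();
            eq.put("x", "a.id = b.id");
            System.out.println(joinCommon(eq.keySet(), eq, false)); // INNER_JOIN_FILTER
            eq.put("y", null);
            System.out.println(joinCommon(eq.keySet(), eq, true));  // LEFT_SIDE_ONLY
        }
    }

The early break also means unsatisfiability is decided before any definitions are merged, which is what lets the left-join case fall back to the left operand unchanged.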
@@ -487,6 +487,18 @@ public static TypeSystem createDefaultDatatypeSystem() {
 
         // String basePath = "src/main/resources";
         try {
+            /*
+            Map<String, String> typeNameToClass = MapReader.read(SparqlifyCoreInit.class.getResourceAsStream("/type-class.tsv"));
+            Map<String, String> typeNameToUri = MapReader.read(SparqlifyCoreInit.class.getResourceAsStream("/type-uri.tsv"));
+            Map<String, String> typeHierarchy = MapReader.read(SparqlifyCoreInit.class.getResourceAsStream("/type-hierarchy.default.tsv"));
+            Map<String, String> physicalTypeMap = MapReader.read(SparqlifyCoreInit.class.getResourceAsStream("/type-map.h2.tsv"));
+            Map<String, String> rdfTypeHierarchyRaw = MapReader.read(SparqlifyCoreInit.class.getResourceAsStream("/rdf-type-hierarchy.tsv"));
+            */
+
+            Map<String, String> typeNameToClass = MapReader
+                    .readFromResource("/type-class.tsv");
+            Map<String, String> typeNameToUri = MapReader
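
The change above replaces MapReader.read(SparqlifyCoreInit.class.getResourceAsStream(...)) with MapReader.readFromResource(...). The implementation of readFromResource is not part of this diff; the sketch below shows what such a wrapper plausibly does, and the TSV-parsing details are assumptions rather than MapReader's actual code:

    import java.io.*;
    import java.nio.charset.StandardCharsets;
    import java.util.*;

    // Hypothetical stand-in for MapReader.readFromResource: resolve the
    // classpath resource, fail fast when it is missing, and parse a
    // two-column TSV into a map.
    public class TsvResourceReader {

        public static Map<String, String> readFromResource(String path) throws IOException {
            InputStream in = TsvResourceReader.class.getResourceAsStream(path);
            if (in == null) {
                // Surfacing the resource name beats the NullPointerException
                // the raw getResourceAsStream/read combination would produce.
                throw new FileNotFoundException("Classpath resource not found: " + path);
            }
            Map<String, String> result = new LinkedHashMap<>();
            try (BufferedReader reader =
                    new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    if (line.isEmpty() || line.startsWith("#")) continue;
                    String[] cols = line.split("\t", 2);
                    result.put(cols[0], cols.length > 1 ? cols[1] : "");
                }
            }
            return result;
        }
    }

One practical benefit of centralizing the lookup is a single place to report which .tsv file is missing, rather than repeating the null check at every call site.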
