
Hints #152

Open. Wants to merge 52 commits into base: hints.

Changes from 1 commit.

Commits (52):
2f6f143
Update README.md
nmacedo Feb 5, 2020
cfda1fb
Update README.md
nmacedo Feb 5, 2020
6006397
Merge pull request #114 from haslab/dev
nmacedo Aug 7, 2020
5fefab1
Update .travis.yml
nmacedo Nov 13, 2020
f630973
Update .travis.yml
nmacedo Nov 13, 2020
55bb7ec
Update .travis.yml
nmacedo Nov 13, 2020
258da57
meteor updated
nmacedo Jan 20, 2023
7123311
Update gitignore
h-neto May 4, 2023
4aeba79
Rewritten API in Quarkus
h-neto May 16, 2023
72d585b
Proguess Save In Hint Implementation
h-neto Jun 2, 2023
419a638
Implemented Difference Algorithm
h-neto Jun 3, 2023
e8526c6
Mutaters + Several BugFixes/Workarounds
h-neto Jun 13, 2023
0cf234b
Update
h-neto Jun 21, 2023
2f1d190
Renable Mutation Hints
h-neto Jun 21, 2023
e2305c0
Small fix
h-neto Jun 28, 2023
44757f1
Small Fix
h-neto Jul 3, 2023
eb35428
Prestatistics cleanup
h-neto Jul 4, 2023
83528d4
Fixes + Test Methods
h-neto Jul 12, 2023
3782351
Major Cleanup
h-neto Jul 17, 2023
c72f847
Refactors
h-neto Jul 18, 2023
d4291f4
Cleanup of Edit operations
h-neto Jul 18, 2023
5ea93d2
QoL change
h-neto Jul 18, 2023
6f0a8c4
Position Map fix
h-neto Jul 18, 2023
43403e7
Nomenclature Fix
h-neto Jul 20, 2023
f652059
Update + Refactor
h-neto Jul 27, 2023
1fcb167
Quarkus Update
h-neto Jul 27, 2023
a18aafb
Update
h-neto Sep 12, 2023
95a6208
Update
h-neto Sep 13, 2023
4ca279d
Update
h-neto Sep 14, 2023
a11f2df
BugFixes
h-neto Sep 22, 2023
17a0466
Update
h-neto Sep 28, 2023
20e3645
Cleanup
h-neto Oct 9, 2023
53cd3c6
Rewrite Policy Algorithm
h-neto Oct 13, 2023
4278cb6
FIX
h-neto Oct 13, 2023
df5ec5c
UPDATE
h-neto Oct 15, 2023
af7fd78
Fix
h-neto Oct 15, 2023
dd02d57
Update
h-neto Oct 16, 2023
ea415c0
Normalization Redone
h-neto Oct 17, 2023
879e1b7
Fixes
h-neto Oct 17, 2023
fd9c49e
Update
h-neto Oct 20, 2023
75dd408
Update
h-neto Oct 21, 2023
8355c7d
Update
h-neto Oct 23, 2023
afdc3ff
Create LICENSE
nmacedo Oct 24, 2023
7eedff4
meteor and deps updated
nmacedo Oct 24, 2023
80aa185
Merge branch 'dev' of github.com:haslab/Alloy4Fun into dev
nmacedo Oct 24, 2023
c97f9b2
Merge pull request #151 from haslab/dev
nmacedo Oct 24, 2023
0057047
Fixes
h-neto Oct 24, 2023
be8a95a
Fix
h-neto Oct 24, 2023
457f346
Merge pull request #1 from haslab/master
h-neto Feb 7, 2024
cacc4d7
Dependency Fix
h-neto Jun 16, 2024
b6b89e3
Merge remote-tracking branch 'origin/master' into dev
h-neto Jun 16, 2024
b8f8950
Cleanup
h-neto Jun 16, 2024
Changes shown below are from commit 8355c7d25c76f5e2b014b16909b96e05b50fbb3f ("Update", committed by h-neto on Oct 23, 2023).
16 changes: 16 additions & 0 deletions api/src/main/java/pt/haslab/specassistant/data/policy/Binary.java
@@ -27,6 +27,8 @@ public Double apply(Transition transition) {
case "/" -> left.apply(transition) / right.apply(transition);
case "+" -> left.apply(transition) + right.apply(transition);
case "-" -> left.apply(transition) - right.apply(transition);
case "max" -> Math.max(left.apply(transition), right.apply(transition));
case "min" -> Math.min(left.apply(transition), right.apply(transition));
default -> throw new UnkownOperationException("Unkown operation " + operator);
};
}
@@ -47,6 +49,15 @@ public static Binary sub(PolicyRule left, PolicyRule right) {
return new Binary("-", left, right);
}

public static Binary max(PolicyRule left, PolicyRule right) {
return new Binary("max", left, right);
}

public static Binary min(PolicyRule left, PolicyRule right) {
return new Binary("min", left, right);
}


@Override
public String toString() {
return "(" + left + " " + operator + " " + right + ")";
@@ -60,4 +71,9 @@ public static PolicyRule scale(Double scalar, PolicyRule p) {
public static PolicyRule sumOld(PolicyRule p) {
return new Binary("+", p, Var.old());
}

public static PolicyRule oneMinusPrefTimesCost(PolicyRule pref, PolicyRule cost) {
return sum(mult(sub(Constant.of(1.0), pref), cost), Var.old());
}

}
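A minimal standalone sketch of the operator-dispatch pattern this commit extends with the "max" and "min" cases. It is illustrative only: plain doubles stand in for the project's PolicyRule and Transition types, and the class and method names are invented for the sketch.

import java.util.function.DoubleBinaryOperator;

// Illustrative only: mirrors the Binary.apply switch above, including the new "max"/"min" cases,
// with plain doubles standing in for the project's PolicyRule/Transition types.
public class BinaryOpSketch {

    static DoubleBinaryOperator forOperator(String operator) {
        return switch (operator) {
            case "*" -> (l, r) -> l * r;
            case "/" -> (l, r) -> l / r;
            case "+" -> Double::sum;
            case "-" -> (l, r) -> l - r;
            case "max" -> Math::max;
            case "min" -> Math::min;
            default -> throw new IllegalArgumentException("Unknown operation " + operator);
        };
    }

    public static void main(String[] args) {
        System.out.println(forOperator("max").applyAsDouble(2.0, 5.0)); // prints 5.0
        System.out.println(forOperator("min").applyAsDouble(2.0, 5.0)); // prints 2.0
    }
}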
Next file: the PolicyOption sample policies.
@@ -4,7 +4,7 @@
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;
import java.util.Map;

@Data
@AllArgsConstructor
@@ -26,16 +26,15 @@ public static PolicyOption maxPercentage(PolicyRule r) {
return new PolicyOption(r, 1.0, Objective.MAX);
}

public static final List<PolicyOption> samples = List.of(
minimize(PolicyRule.costPrefMix(Var.arrivals(), Var.ted())),
minimize(PolicyRule.costPrefMix(Var.arrivals(), Binary.sum(Binary.scale(0.7, Var.ted()), Binary.scale(0.3, Var.complexity())))),
minimize(Binary.sumOld(Var.ted())),
minimize(Binary.sumOld(Var.complexity())),
minimize(Binary.sumOld(Constant.of(1.0))),
minimize(Binary.sumOld(Binary.sum(Binary.scale(0.7, Var.ted()), Binary.scale(0.3, Var.complexity())))),
maxPercentage(Binary.mult(Var.arrivals(), Var.old())),
maxPercentage(Binary.mult(Var.arrivals(), Var.old()))

public static final Map<String,PolicyOption> samples = Map.of(
"TEDxArrival",minimize(Binary.oneMinusPrefTimesCost(Var.arrivals(), Var.ted())),
"TEDCOMPxArrival",minimize(Binary.oneMinusPrefTimesCost(Var.arrivals(), Binary.sum(Binary.scale(0.7, Var.ted()), Binary.scale(0.3, Var.complexity())))),
"TED",minimize(Binary.sumOld(Var.ted())),
"COMP",minimize(Binary.sumOld(Var.complexity())),
"ONE",minimize(Binary.sumOld(Constant.of(1.0))),
"TEDCOMP",minimize(Binary.sumOld(Binary.sum(Binary.scale(0.7, Var.ted()), Binary.scale(0.3, Var.complexity())))),
"Arrival",maxPercentage(Binary.mult(Var.arrivals(), Var.old())),
"Departure",maxPercentage(Binary.mult(Var.departures(), Var.old()))
);

}
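The change above replaces the positional List of sample policies with a Map keyed by a short policy name, so a sample can be selected by name. A standalone sketch of the same named-registry pattern follows; the keys and the 0.7/0.3 weights come from the diff, while DoubleBinaryOperator, the class name, and the lambdas are illustrative stand-ins for PolicyOption.

import java.util.Map;
import java.util.function.DoubleBinaryOperator;

// Illustrative only: a named registry of scoring functions, analogous to the samples map above.
public class PolicySamplesSketch {

    // (ted, complexity) -> score; simplified stand-ins for the PolicyOption entries in the diff.
    static final Map<String, DoubleBinaryOperator> SAMPLES = Map.of(
            "TED", (ted, comp) -> ted,
            "COMP", (ted, comp) -> comp,
            "TEDCOMP", (ted, comp) -> 0.7 * ted + 0.3 * comp
    );

    public static void main(String[] args) {
        DoubleBinaryOperator policy = SAMPLES.get("TEDCOMP"); // look a sample up by name
        System.out.println(policy.applyAsDouble(1.0, 2.0));   // roughly 1.3 (0.7*1.0 + 0.3*2.0)
    }
}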
Next file: the abstract PolicyRule class.
@@ -22,7 +22,4 @@ public abstract class PolicyRule implements Function<Transition, Double> {

public abstract void normalizeByGraph(ObjectId objectId);

public static PolicyRule costPrefMix(PolicyRule pref, PolicyRule cost) {
return Binary.sum(Binary.mult(Binary.sub(Constant.of(1.0), pref), cost), Var.old());
}
}
Next file: the Var factory methods.
@@ -32,6 +32,10 @@ public static Var arrivals() {
return of(Name.ARRIVALS);
}

public static Var departures() {
return of(Name.DEPARTURES);
}

@Override
public String toString() {
return var.toString();
Next file: the repository that streams Transitions by destination.
@@ -5,9 +5,9 @@
import lombok.Synchronized;
import org.bson.Document;
import org.bson.types.ObjectId;
import pt.haslab.specassistant.data.aggregation.Transition;
import pt.haslab.specassistant.data.models.Edge;
import pt.haslab.specassistant.data.models.Node;
import pt.haslab.specassistant.data.aggregation.Transition;

import java.util.Collection;
import java.util.List;
@@ -44,13 +44,13 @@ public void clearPolicyFromGraph(ObjectId graph_id) {
update(new Document("$unset", new Document("policy", null))).where("graph_id", graph_id);
}

public Stream<Transition> streamTransitionsByDestinationScoreGT(Node destination, Double min) {
public Stream<Transition> streamTransitionsByDestinationScoreNull(Node destination, Double min) {
return StreamSupport.stream(mongoCollection().aggregate(List.of(
new Document("$match", new Document("destination", destination.id)),
new Document("$replaceRoot", new Document("newRoot", new Document("edge", "$$ROOT"))),
new Document("$lookup", new Document("from", "Node").append("localField", "edge.origin").append("foreignField", "_id").append("as", "from")),
new Document("$unwind", "$from"),
new Document("$match", new Document("$or", List.of(new Document("from.score", new Document("$gt", min)), new Document("from.score", null))))
new Document("$match", new Document(new Document("from.score", new Document("$ne", null))))
), Transition.class).spliterator(), false).peek(x -> x.setTo(destination));
}
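As the hunk above shows, the renamed query drops the score threshold: the old pipeline kept origin nodes whose score was greater than min or null, while the new $match keeps only origins with a non-null score (the min parameter no longer affects the filter). Expressed as plain predicates, as a sketch in which Double stands in for the origin node's "score" field:

// Sketch of the filter change only; Double stands in for the origin node's "score" field.
java.util.function.BiPredicate<Double, Double> oldFilter =
        (score, min) -> score == null || score > min; // before: null score OR score above the threshold
java.util.function.BiPredicate<Double, Double> newFilter =
        (score, min) -> score != null;                // after: any non-null score; min is ignored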

Next file: the Model repository queries.
@@ -9,6 +9,7 @@

import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

@@ -19,6 +20,10 @@ public Stream<Model> streamByDerivationOfAndOriginal(String derivationOf, String
return find("derivationOf = ?1 and original = ?2", derivationOf, original).stream();
}

public Stream<Model> streamByDerivationOfInAndOriginal(Collection<String> derivationOfx, String original) {
return find("derivationOf in ?1 and original = ?2", derivationOfx, original).stream();
}

public Stream<Model> streamByOriginalAndUnSat(String original_id) {
return ByOriginalAndUnSat(original_id).stream();
}
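A hypothetical usage sketch of the new batch variant, assuming an injected ModelRepository as used elsewhere in the project; parentIdA, parentIdB, and originalId are placeholder values, not names from the diff.

// Hypothetical usage: fetch the derivations of several parent models in one query
// instead of calling streamByDerivationOfAndOriginal once per parent.
List<Model> children = modelRepo
        .streamByDerivationOfInAndOriginal(List.of(parentIdA, parentIdB), originalId)
        .collect(Collectors.toList());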
Next file: the model-tree walker (exercise renamed to challenge throughout).
@@ -70,30 +70,30 @@ private static boolean testSpecModifications(CompModule original, CompModule tar
//List<Sig> tSigs = original.getAllSigs().makeConstList();
}

private Map<ObjectId, ObjectId> walkModelTreeStep(Predicate<Model> model_filter, Map<String, Challenge> cmdToExercise, Predicate<CompModule> modifiedPred, Model current, Map<ObjectId, ObjectId> context) {
private Map<ObjectId, ObjectId> walkModelTreeStep(Predicate<Model> model_filter, Map<String, Challenge> cmdToChallenge, Predicate<CompModule> modifiedPred, Model current, Map<ObjectId, ObjectId> context) {
try {
if (model_filter.test(current) && current.isValidExecution() && cmdToExercise.containsKey(current.getCmd_n())) {
if (model_filter.test(current) && current.isValidExecution() && cmdToChallenge.containsKey(current.getCmd_n())) {

CompModule world = ParseUtil.parseModel(current.getCode());
Challenge exercise = cmdToExercise.get(current.getCmd_n());
ObjectId contextId = exercise.id;
Challenge challenge = cmdToChallenge.get(current.getCmd_n());
ObjectId contextId = challenge.id;
ObjectId old_node_id = context.get(contextId);

boolean modified = modifiedPred.test(world);

if (exercise.isValidCommand(world, current.getCmd_i())) {
if (challenge.isValidCommand(world, current.getCmd_i())) {
boolean valid = current.getSat() == 0;

Map<String, String> formula = Node.getNormalizedFormulaFrom(world.getAllFunc().makeConstList(), exercise.getTargetFunctions());
Map<String, String> formula = Node.getNormalizedFormulaFrom(world.getAllFunc().makeConstList(), challenge.getTargetFunctions());

nodeRepo.incrementOrCreate(formula, valid, exercise.getGraph_id(), modified ? current.getId() : null);
ObjectId new_node_id = nodeRepo.findByGraphIdAndFormula(exercise.getGraph_id(), formula).orElseThrow().getId();
nodeRepo.incrementOrCreate(formula, valid, challenge.getGraph_id(), modified ? current.getId() : null);
ObjectId new_node_id = nodeRepo.findByGraphIdAndFormula(challenge.getGraph_id(), formula).orElseThrow().getId();

if (!old_node_id.equals(new_node_id)) { // No laces
nodeRepo.incrementLeaveById(old_node_id);
context = new HashMap<>(context); // Make a new context based on the previous one
context.put(contextId, new_node_id);
edgeRepo.incrementOrCreate(exercise.getGraph_id(), old_node_id, new_node_id);
edgeRepo.incrementOrCreate(challenge.getGraph_id(), old_node_id, new_node_id);
}
}
}
@@ -105,25 +105,25 @@ private Map<ObjectId, ObjectId> walkModelTreeStep(Predicate<Model> model_filter,
return context;
}

public CompletableFuture<Void> walkModelTree(Model root, Predicate<Model> model_filter, Collection<Challenge> exercises) {
if (exercises.isEmpty())
public CompletableFuture<Void> walkModelTree(Model root, Predicate<Model> model_filter, Collection<Challenge> challenges) {
if (challenges.isEmpty())
return CompletableFuture.completedFuture(null);

CompModule world = ParseUtil.parseModel(root.getCode());

Map<String, Challenge> cmdToExercise = exercises.stream().collect(Collectors.toUnmodifiableMap(Challenge::getCmd_n, x -> x));
Map<ObjectId, ObjectId> exerciseToNodeId = new HashMap<>();
Map<String, Challenge> cmdToChallenge = challenges.stream().collect(Collectors.toUnmodifiableMap(Challenge::getCmd_n, x -> x));
Map<ObjectId, ObjectId> challengeToNodeId = new HashMap<>();

cmdToExercise.values().forEach(exercise -> {
Map<String, String> formula = Node.getNormalizedFormulaFrom(world.getAllFunc().makeConstList(), exercise.getTargetFunctions());
nodeRepo.incrementOrCreate(formula, false, exercise.getGraph_id(), null);
exerciseToNodeId.put(exercise.id, nodeRepo.findByGraphIdAndFormula(exercise.getGraph_id(), formula).orElseThrow().getId());
cmdToChallenge.values().forEach(challenge -> {
Map<String, String> formula = Node.getNormalizedFormulaFrom(world.getAllFunc().makeConstList(), challenge.getTargetFunctions());
nodeRepo.incrementOrCreate(formula, false, challenge.getGraph_id(), null);
challengeToNodeId.put(challenge.id, nodeRepo.findByGraphIdAndFormula(challenge.getGraph_id(), formula).orElseThrow().getId());
}
);
return abstractWalkModelTree(
(m, ctx) -> walkModelTreeStep(model_filter, cmdToExercise, w -> testSpecModifications(world, w), m, ctx),
(m, ctx) -> walkModelTreeStep(model_filter, cmdToChallenge, w -> testSpecModifications(world, w), m, ctx),
model -> modelRepo.streamByDerivationOfAndOriginal(model.getId(), model.getOriginal()),
exerciseToNodeId,
challengeToNodeId,
root
);
}
@@ -175,17 +175,17 @@ public CompletableFuture<Void> parseModelTree(String model_id, Predicate<Model>
return parseModelTree(model_id, model_filter, challengeRepo.streamByModelId(model_id).collect(Collectors.toSet()));
}

public CompletableFuture<Void> parseModelTree(String model_id, Predicate<Model> model_filter, Collection<Challenge> exercises) {
public CompletableFuture<Void> parseModelTree(String model_id, Predicate<Model> model_filter, Collection<Challenge> challenges) {
Model model = modelRepo.findById(model_id);

CompModule base_world = ParseUtil.parseModel(model.getCode());

AtomicLong parsingTime = new AtomicLong(System.nanoTime());
Map<ObjectId, Long> count = exercises.stream().map(Challenge::getGraph_id).collect(Collectors.toMap(x -> x, x -> nodeRepo.getTotalVisitsFromGraph(x)));
Map<ObjectId, Long> count = challenges.stream().map(Challenge::getGraph_id).collect(Collectors.toMap(x -> x, x -> nodeRepo.getTotalVisitsFromGraph(x)));

return walkModelTree(model, model_filter, exercises)
return walkModelTree(model, model_filter, challenges)
.thenRun(() -> parsingTime.updateAndGet(l -> System.nanoTime() - l))
.thenCompose(nil -> FutureUtil.runEachAsync(exercises,
.thenCompose(nil -> FutureUtil.runEachAsync(challenges,
ex -> {
long st = System.nanoTime();
AtomicLong local_count = new AtomicLong();
Next file: the GraphManager class.
@@ -33,7 +33,7 @@ public class GraphManager {
@Inject
EdgeRepository edgeRepo;
@Inject
ChallengeRepository exerciseRepo;
ChallengeRepository challengeRepo;

public void cleanGraph(ObjectId graph_id) {
nodeRepo.deleteByGraphId(graph_id);
@@ -42,51 +42,51 @@ public void cleanGraph(ObjectId graph_id) {

public void deleteGraph(ObjectId graph_id) {
cleanGraph(graph_id);
exerciseRepo.deleteByGraphId(graph_id);
challengeRepo.deleteByGraphId(graph_id);
Graph.deleteById(graph_id);
}

public void dropEverything() {
Graph.deleteAll();
nodeRepo.deleteAll();
edgeRepo.deleteAll();
exerciseRepo.deleteAll();
challengeRepo.deleteAll();
}

public void generateChallenge(ObjectId graph_id, String model_id, Integer secretCommandCount, String cmd_n, Set<String> targetFunctions) {
if (exerciseRepo.notExistsModelIdAndCmdN(model_id, cmd_n)) {
exerciseRepo.persistOrUpdate(new Challenge(model_id, graph_id, secretCommandCount, cmd_n, targetFunctions));
if (challengeRepo.notExistsModelIdAndCmdN(model_id, cmd_n)) {
challengeRepo.persistOrUpdate(new Challenge(model_id, graph_id, secretCommandCount, cmd_n, targetFunctions));
}
}

public void generateExercisesWithGraphIdFromSecrets(Function<String, ObjectId> commandToGraphId, String model_id) {
public void generateChallengesWithGraphIdFromSecrets(Function<String, ObjectId> commandToGraphId, String model_id) {
Model m = modelRepo.findByIdOptional(model_id).orElseThrow();
CompModule world = ParseUtil.parseModel(m.getCode());
List<Pos> secretPositions = secretPos(world.path, m.getCode());

Map<String, Set<String>> targets = AlloyUtil.getFunctionWithPositions(world, secretPositions);
Integer cmdCount = targets.size();

exerciseRepo.persistOrUpdate(targets.entrySet().stream().map(x -> new Challenge(model_id, commandToGraphId.apply(x.getKey()), cmdCount, x.getKey(), x.getValue())));
challengeRepo.persistOrUpdate(targets.entrySet().stream().map(x -> new Challenge(model_id, commandToGraphId.apply(x.getKey()), cmdCount, x.getKey(), x.getValue())));
}

public Set<ObjectId> getModelGraphs(String modelid) {
return exerciseRepo.streamByModelId(modelid).map(Challenge::getGraph_id).collect(Collectors.toSet());
return challengeRepo.streamByModelId(modelid).map(Challenge::getGraph_id).collect(Collectors.toSet());
}

public void deleteAllGraphStructures() {
nodeRepo.deleteAll();
edgeRepo.deleteAll();
}

public void deleteExerciseByModelIDs(List<String> ids, boolean cascadeToGraphs) {
public void deleteChallengesByModelIDs(List<String> ids, boolean cascadeToGraphs) {
if (!cascadeToGraphs) {
exerciseRepo.deleteByModelIdIn(ids);
challengeRepo.deleteByModelIdIn(ids);
} else {
Set<ObjectId> graph_ids = exerciseRepo.streamByModelIdIn(ids).map(Challenge::getGraph_id).collect(Collectors.toSet());
exerciseRepo.deleteByModelIdIn(ids);
Set<ObjectId> graph_ids = challengeRepo.streamByModelIdIn(ids).map(Challenge::getGraph_id).collect(Collectors.toSet());
challengeRepo.deleteByModelIdIn(ids);
graph_ids.forEach(graph_id -> {
if (!exerciseRepo.containsGraph(graph_id))
if (!challengeRepo.containsGraph(graph_id))
deleteGraph(graph_id);
});
}
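The cascade branch of deleteChallengesByModelIDs first records which graphs the doomed challenges reference, then deletes the challenges, and finally removes every graph that no remaining challenge points at. A standalone sketch of that order of operations, with in-memory collections standing in for the challenge and graph repositories; all names here are invented for the sketch.

import java.util.*;
import java.util.stream.Collectors;

// Illustrative only: the collect, delete, then garbage-collect order used by deleteChallengesByModelIDs.
public class CascadeDeleteSketch {

    static final Map<String, String> challengeToGraph = new HashMap<>(); // challengeId -> graphId
    static final Set<String> graphs = new HashSet<>();

    static void deleteChallenges(Collection<String> challengeIds, boolean cascadeToGraphs) {
        if (!cascadeToGraphs) {
            challengeIds.forEach(challengeToGraph::remove);
            return;
        }
        // 1. Remember which graphs the doomed challenges pointed at.
        Set<String> graphIds = challengeIds.stream()
                .map(challengeToGraph::get)
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());
        // 2. Delete the challenges themselves.
        challengeIds.forEach(challengeToGraph::remove);
        // 3. Delete every graph that no surviving challenge still references.
        graphIds.stream()
                .filter(g -> !challengeToGraph.containsValue(g))
                .forEach(graphs::remove);
    }
}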