Below are example usages of java.util.concurrent.atomic.AtomicLong#getAndIncrement(), drawn from open-source projects on GitHub.
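As a quick refresher before the examples: getAndIncrement() atomically adds one and returns the value the counter held before the increment, so racing callers each observe a distinct value and no update is lost. A minimal self-contained sketch of that contract (class name and thread/iteration counts are illustrative):

import java.util.concurrent.atomic.AtomicLong;

public class GetAndIncrementDemo {
    public static void main(String[] args) throws InterruptedException {
        AtomicLong counter = new AtomicLong();       // starts at 0
        long before = counter.getAndIncrement();     // returns 0; counter is now 1

        // Racing increments never lose updates: 4 threads x 1000 increments each.
        Thread[] workers = new Thread[4];
        for (int i = 0; i < workers.length; i++) {
            workers[i] = new Thread(() -> {
                for (int j = 0; j < 1000; j++) {
                    counter.getAndIncrement();
                }
            });
            workers[i].start();
        }
        for (Thread t : workers) {
            t.join();
        }
        System.out.println(before);                  // 0
        System.out.println(counter.get());           // always 4001
    }
}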
public synchronized List<TestData> addTests(TestSuite testSuite) {
    List<TestData> result = new ArrayList<>(testSuite.size());
    for (TestCase testCase : testSuite.getTestCaseList()) {
        String id = testCase.getId();
        AtomicLong count = ids.get(id);
        if (count == null) {
            // First occurrence: keep the raw id and seed a counter for later duplicates.
            ids.put(id, new AtomicLong(1));
        } else {
            // Duplicate id: disambiguate it with an increasing numeric suffix.
            id = id + "__" + count.getAndIncrement();
        }
        int testIndex = testIndexGenerator.incrementAndGet();
        testCase.setId(id);
        TestData test = new TestData(testIndex, testCase, testSuite);
        result.add(test);
        tests.put(id, test);
    }
    return result;
}
public void sendMessageToKafka(String resultTopic, String key, DbusMessage dbusMessage,
                               AtomicLong sendCnt, AtomicLong recvCnt, AtomicBoolean isError) throws Exception {
    if (stringProducer == null) {
        throw new Exception("producer is null, can't send to kafka!");
    }
    ProducerRecord<String, String> record = new ProducerRecord<>(resultTopic, key, dbusMessage.toString());
    // Count the attempt before the asynchronous send; the callback counts acknowledgements.
    sendCnt.getAndIncrement();
    stringProducer.send(record, (metadata, e) -> {
        if (e != null) {
            logger.error("Send Message to kafka exception!", e);
            isError.set(true);
        } else {
            recvCnt.getAndIncrement();
        }
    });
}
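The method above counts send attempts eagerly and acknowledgements in the asynchronous callback; a caller can flush and then reconcile the two counters to detect lost messages. A runnable sketch of that pattern against the plain Kafka producer API (broker address, topic name, and batch size are illustrative, not from the project above):

import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class CountingSendDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");   // illustrative broker address
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        AtomicLong sendCnt = new AtomicLong();
        AtomicLong recvCnt = new AtomicLong();
        AtomicBoolean isError = new AtomicBoolean(false);

        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 100; i++) {
                sendCnt.getAndIncrement();                   // count the attempt up front
                producer.send(new ProducerRecord<>("result.topic", "key-" + i, "value-" + i),
                        (metadata, e) -> {
                            if (e != null) {
                                isError.set(true);
                            } else {
                                recvCnt.getAndIncrement();   // count only acknowledged sends
                            }
                        });
            }
            producer.flush();                                // wait for outstanding requests
        }
        if (isError.get() || sendCnt.get() != recvCnt.get()) {
            System.err.println("Lost messages: sent=" + sendCnt + " acked=" + recvCnt);
        }
    }
}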
@Override
public Long publishSync(String channelName, BsonObject event) {
    try {
        final Table table = ensureTable(channelName);
        // One sequence counter per channel; the counter's current value is the row key.
        final AtomicLong l = seqNums.computeIfAbsent(channelName, s -> new AtomicLong());
        final Put put = new Put(Bytes.toBytes(l.get()));
        final byte[] bytes = BsonCodec.bsonObjectToBsonBytes(event);
        put.addColumn(colFamily, qualifier, bytes);
        table.put(put);
        // Returns the sequence number just written; note that l.get() above and this call
        // only agree if publishSync is not invoked concurrently for the same channel.
        return l.getAndIncrement();
    } catch (Exception exp) {
        log.error("Failed to publish event", exp);
        return EventSink.SADLY_NO_CONCEPT_OF_A_MESSAGE_NUMBER;
    }
}
@Test
public void postCompleteWithRequest() {
    TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
    ArrayDeque<Integer> queue = new ArrayDeque<Integer>();
    AtomicLong state = new AtomicLong();
    BooleanSupplier isCancelled = new BooleanSupplier() {
        @Override
        public boolean getAsBoolean() throws Exception {
            return false;
        }
    };

    ts.onSubscribe(new BooleanSubscription());
    queue.offer(1);
    state.getAndIncrement(); // register one outstanding request before completing
    QueueDrainHelper.postComplete(ts, queue, state, isCancelled);

    ts.assertResult(1);
}
@Test
public void postCompleteCancelled() {
    final TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
    ArrayDeque<Integer> queue = new ArrayDeque<Integer>();
    AtomicLong state = new AtomicLong();
    BooleanSupplier isCancelled = new BooleanSupplier() {
        @Override
        public boolean getAsBoolean() throws Exception {
            return ts.isCancelled();
        }
    };

    ts.onSubscribe(new BooleanSubscription());
    queue.offer(1);
    state.getAndIncrement();
    ts.cancel();
    QueueDrainHelper.postComplete(ts, queue, state, isCancelled);

    ts.assertEmpty();
}
@Test
public void postCompleteCancelledAfterOne() {
    final TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
        @Override
        public void onNext(Integer t) {
            super.onNext(t);
            cancel();
        }
    };
    ArrayDeque<Integer> queue = new ArrayDeque<Integer>();
    AtomicLong state = new AtomicLong();
    BooleanSupplier isCancelled = new BooleanSupplier() {
        @Override
        public boolean getAsBoolean() throws Exception {
            return ts.isCancelled();
        }
    };

    ts.onSubscribe(new BooleanSubscription());
    queue.offer(1);
    state.getAndIncrement();
    QueueDrainHelper.postComplete(ts, queue, state, isCancelled);

    ts.assertValue(1).assertNoErrors().assertNotComplete();
}
/**
 * Creates and transfers a new flow file whose contents are the JSON-serialized value of the specified event, with the sequence ID attribute set.
 *
 * @param session A reference to a ProcessSession from which the flow file(s) will be created and transferred
 * @param transitUri The URI to associate with the provenance "receive" event for each flow file
 * @param eventInfo An event whose value will become the contents of the flow file
 * @param currentSequenceId The current CDC sequence ID
 * @param relationship The relationship to which the flow file(s) will be transferred
 * @return The next available CDC sequence ID for use by the CDC processor
 */
@Override
public long writeEvent(final ProcessSession session, String transitUri, final UpdateRowsEventInfo eventInfo, final long currentSequenceId, Relationship relationship) {
    final AtomicLong seqId = new AtomicLong(currentSequenceId);
    for (Map.Entry<Serializable[], Serializable[]> row : eventInfo.getRows()) {
        FlowFile flowFile = session.create();
        flowFile = session.write(flowFile, outputStream -> {
            super.startJson(outputStream, eventInfo);
            super.writeJson(eventInfo);
            final BitSet bitSet = eventInfo.getIncludedColumns();
            writeRow(eventInfo, row, bitSet);
            super.endJson();
        });
        flowFile = session.putAllAttributes(flowFile, getCommonAttributes(seqId.get(), eventInfo));
        session.transfer(flowFile, relationship);
        session.getProvenanceReporter().receive(flowFile, transitUri);
        // One sequence ID is consumed per emitted row.
        seqId.getAndIncrement();
    }
    return seqId.get();
}
/**
 * Creates and transfers a new flow file whose contents are the JSON-serialized value of the specified event, with the sequence ID attribute set.
 *
 * @param session A reference to a ProcessSession from which the flow file(s) will be created and transferred
 * @param transitUri The URI to associate with the provenance "receive" event for each flow file
 * @param eventInfo An event whose value will become the contents of the flow file
 * @param currentSequenceId The current CDC sequence ID
 * @param relationship The relationship to which the flow file(s) will be transferred
 * @return The next available CDC sequence ID for use by the CDC processor
 */
@Override
public long writeEvent(final ProcessSession session, String transitUri, final DeleteRowsEventInfo eventInfo, final long currentSequenceId, Relationship relationship) {
    final AtomicLong seqId = new AtomicLong(currentSequenceId);
    for (Serializable[] row : eventInfo.getRows()) {
        FlowFile flowFile = session.create();
        flowFile = session.write(flowFile, outputStream -> {
            super.startJson(outputStream, eventInfo);
            super.writeJson(eventInfo);
            final BitSet bitSet = eventInfo.getIncludedColumns();
            writeRow(eventInfo, row, bitSet);
            super.endJson();
        });
        flowFile = session.putAllAttributes(flowFile, getCommonAttributes(seqId.get(), eventInfo));
        session.transfer(flowFile, relationship);
        session.getProvenanceReporter().receive(flowFile, transitUri);
        // One sequence ID is consumed per emitted row.
        seqId.getAndIncrement();
    }
    return seqId.get();
}
public void run() {
    // Two barrier waits: presumably one for setup and one to start all workers at once.
    phaser.arriveAndAwaitAdvance();
    phaser.arriveAndAwaitAdvance();
    AtomicLong a = adder;
    for (int i = 0; i < incs; ++i)
        a.getAndIncrement();
    result = a.get();
    phaser.arrive(); // signal completion without waiting
}
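This run() body is a worker from a concurrency stress test: phaser barriers line the threads up, then each thread applies incs increments to a shared AtomicLong. A simplified self-contained harness in the same spirit, with one barrier before and one after the increment loop (all names and counts are illustrative):

import java.util.concurrent.Phaser;
import java.util.concurrent.atomic.AtomicLong;

public class IncrementRace {
    public static void main(String[] args) throws InterruptedException {
        final int THREADS = 4;
        final int INCS = 100_000;
        AtomicLong adder = new AtomicLong();
        Phaser phaser = new Phaser(THREADS + 1);      // workers plus the main thread
        Thread[] workers = new Thread[THREADS];
        for (int i = 0; i < THREADS; i++) {
            workers[i] = new Thread(() -> {
                phaser.arriveAndAwaitAdvance();       // wait until every worker is ready
                for (int j = 0; j < INCS; j++) {
                    adder.getAndIncrement();
                }
                phaser.arriveAndAwaitAdvance();       // wait until every worker is done
            });
            workers[i].start();
        }
        phaser.arriveAndAwaitAdvance();               // release the workers together
        phaser.arriveAndAwaitAdvance();               // wait for all of them to finish
        for (Thread t : workers) {
            t.join();
        }
        System.out.println(adder.get());              // always THREADS * INCS
    }
}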
@Override
public void checkPass(String serviceid, String token) {
    AtomicLong times = checkPassTimesMap.get(serviceid);
    if (times == null) {
        // putIfAbsent followed by get guarantees a single shared counter per service id,
        // even when two threads race on the first check.
        checkPassTimesMap.putIfAbsent(serviceid, new AtomicLong());
        times = checkPassTimesMap.get(serviceid);
    }
    times.getAndIncrement();
    serviceStatus.put(serviceid, true);
}
@Override
public void recordDroppedRequest(String category) {
    AtomicLong counter = dropCounters.get(category);
    if (counter == null) {
        // putIfAbsent returns the previous mapping, or null if our new counter won the race;
        // in the latter case, re-read the map to pick up the counter we just installed.
        counter = dropCounters.putIfAbsent(category, new AtomicLong());
        if (counter == null) {
            counter = dropCounters.get(category);
        }
    }
    counter.getAndIncrement();
}
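The get/putIfAbsent/get sequence above predates Java 8; with ConcurrentMap.computeIfAbsent the same race-safe counter initialization collapses to one line. A minimal sketch (class and field names are illustrative):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;

public class DropCounters {
    private final ConcurrentMap<String, AtomicLong> dropCounters = new ConcurrentHashMap<>();

    public void recordDroppedRequest(String category) {
        // computeIfAbsent installs the counter at most once, even under contention.
        dropCounters.computeIfAbsent(category, k -> new AtomicLong()).getAndIncrement();
    }
}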
protected String generateName() {
    // putIfAbsent returns null when this call installed the counter, so fall back to get().
    AtomicLong counter = nameMap.putIfAbsent(targetType, new AtomicLong(0L));
    if (counter == null) {
        counter = nameMap.get(targetType);
    }
    return targetType + "#" + counter.getAndIncrement();
}
static void downloadActionContents(
        Path root, String instanceName, Set<Digest> actionDigests, Channel channel)
        throws IOException, InterruptedException {
    ByteStreamStub bsStub = ByteStreamGrpc.newStub(channel);
    ExecutorService service = newSingleThreadExecutor();
    ListeningScheduledExecutorService retryService =
            listeningDecorator(newSingleThreadScheduledExecutor());
    Set<Digest> visitedDigests = Sets.newHashSet();
    Set<Digest> visitedDirectories = Sets.newHashSet();
    // Scheduled-but-unfinished fetch count: the getter tasks receive the counter so they
    // can decrement it, and the polling loop below waits for it to drain to zero.
    AtomicLong outstandingOperations = new AtomicLong(0);
    for (Digest actionDigest : actionDigests) {
        ByteString content = getBlobIntoFile("action", instanceName, actionDigest, bsStub, root);
        Action action = Action.parseFrom(content);
        Digest commandDigest = action.getCommandDigest();
        if (!visitedDigests.contains(commandDigest)) {
            visitedDigests.add(commandDigest);
            outstandingOperations.getAndIncrement();
            service.execute(
                blobGetter(
                    root, instanceName, commandDigest, bsStub, outstandingOperations, retryService));
        }
        Digest inputRootDigest = action.getInputRootDigest();
        if (!visitedDigests.contains(inputRootDigest)) {
            visitedDirectories.add(inputRootDigest);
            visitedDigests.add(inputRootDigest);
            outstandingOperations.getAndIncrement();
            service.execute(
                directoryGetter(
                    root,
                    instanceName,
                    inputRootDigest,
                    visitedDirectories,
                    visitedDigests,
                    bsStub,
                    service,
                    outstandingOperations,
                    retryService));
        }
    }
    while (outstandingOperations.get() > 0) {
        System.out.println("Waiting on " + outstandingOperations.get() + " operations");
        TimeUnit.SECONDS.sleep(5);
    }
    service.shutdown();
    service.awaitTermination(1, TimeUnit.MINUTES);
    retryService.shutdown();
    retryService.awaitTermination(1, TimeUnit.MINUTES);
}