Use of com.pamirs.pradar.exception.PressureMeasureError in project LinkAgent by shulieTech.
The class MultiPercolateRequestIndexRename, method reindex0.
@Override
public List<String> reindex0(Object target) {
    MultiPercolateRequest mReq = (MultiPercolateRequest) target;
    List list = mReq.requests();
    List<String> indexes = new ArrayList<String>();
    for (Object req : list) {
        if (req == null) {
            continue;
        }
        RequestIndexRename requestIndexRename = RequestIndexRenameProvider.get(req);
        if (requestIndexRename != null) {
            indexes.addAll(requestIndexRename.reindex(req));
        } else {
            throw new PressureMeasureError("elasticsearch " + req.getClass().getName() + " is not supported!");
        }
    }
    String[] indices = mReq.indices();
    for (int i = 0, len = indices.length; i < len; i++) {
        String index = indices[i];
        /**
         * Writing is not allowed if the index is on the search white list
         */
        if (GlobalConfig.getInstance().getSearchWhiteList().contains(index)) {
            throw new PressureMeasureError("Cluster Test request can't refresh business index ! " + index);
        }
        if (!Pradar.isClusterTestPrefix(index)) {
            index = Pradar.addClusterTestPrefixLower(index);
            indices[i] = index;
        }
    }
    return indexes;
}
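The pattern above (reject indices on the white list, otherwise prepend the cluster-test prefix) can be shown in a minimal, self-contained sketch. The "pt_" prefix, the white-list contents and the rewriteIndices helper are assumptions for illustration only; the real logic lives in Pradar and GlobalConfig as shown above, and IllegalStateException stands in for PressureMeasureError so the sketch compiles without the agent on the classpath.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class IndexRenameSketch {

    // Hypothetical stand-ins: the real prefix and white list come from Pradar / GlobalConfig.
    private static final String CLUSTER_TEST_PREFIX = "pt_";
    private static final Set<String> SEARCH_WHITE_LIST = new HashSet<String>(Arrays.asList("orders"));

    static String[] rewriteIndices(String[] indices) {
        for (int i = 0; i < indices.length; i++) {
            String index = indices[i];
            if (SEARCH_WHITE_LIST.contains(index)) {
                // Mirrors the PressureMeasureError thrown above for white-listed business indices.
                throw new IllegalStateException("Cluster test request can't touch business index: " + index);
            }
            if (!index.startsWith(CLUSTER_TEST_PREFIX)) {
                indices[i] = CLUSTER_TEST_PREFIX + index;
            }
        }
        return indices;
    }

    public static void main(String[] args) {
        // prints [pt_logs, pt_metrics]
        System.out.println(Arrays.toString(rewriteIndices(new String[] { "logs", "pt_metrics" })));
    }
}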
Use of com.pamirs.pradar.exception.PressureMeasureError in project LinkAgent by shulieTech.
The class MongoExecuteInterceptor, method getParameter0.
@Override
public Object[] getParameter0(Advice advice) throws Throwable {
    if (!Pradar.isClusterTest()) {
        return advice.getParameterArray();
    }
    Object[] args = advice.getParameterArray();
    Integer operationNum = operationNumMap.get(args[0].getClass());
    if (operationNum == null) {
        LOGGER.error("not support operation class is {} ", args[0].getClass().getName());
        throw new PressureMeasureError("[2]mongo not support pressure operation class is " + args[0].getClass().getName());
    }
    List<ServerAddress> serverAddresses = ((MongoClient) advice.getTarget()).getAllAddress();
    ShadowDatabaseConfig shadowDatabaseConfig = getShadowDatabaseConfig(serverAddresses);
    if (operationNum > 7 && shadowDatabaseConfig == null) {
        // "mongo 未配置对应影子表或者影子库" -- no shadow table or shadow database is configured for mongo
        ErrorReporter.Error error = ErrorReporter.buildError()
                .setErrorType(ErrorTypeEnum.DataSource)
                .setErrorCode("datasource-0005")
                .setMessage("mongo 未配置对应影子表或者影子库")
                .setDetail("mongo 未配置对应影子表或者影子库");
        error.closePradar(ConfigNames.SHADOW_DATABASE_CONFIGS);
        error.report();
        throw new PressureMeasureError("mongo 未配置对应影子表或者影子库");
    }
    // A null config can only reach this point for read operations (operationNum <= 7);
    // guard against it to avoid a NullPointerException.
    if (shadowDatabaseConfig != null && shadowDatabaseConfig.isShadowDatabase()) {
        return advice.getParameterArray();
    }
    MongoNamespace busMongoNamespace;
    switch (operationNum) {
        case FIND:
            objectFieldMapAdd(FindOperation.class);
            busMongoNamespace = ((FindOperation) args[0]).getNamespace();
            setReadPtMongoNamespace(FindOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case COUNT:
            objectFieldMapAdd(CountOperation.class);
            busMongoNamespace = (MongoNamespace) objectFieldMap.get(CountOperation.class).get(args[0]);
            setReadPtMongoNamespace(CountOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case DISTINCT:
            objectFieldMapAdd(DistinctOperation.class);
            busMongoNamespace = (MongoNamespace) objectFieldMap.get(DistinctOperation.class).get(args[0]);
            setReadPtMongoNamespace(DistinctOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case GROUP:
            objectFieldMapAdd(GroupOperation.class);
            busMongoNamespace = ((GroupOperation) args[0]).getNamespace();
            setReadPtMongoNamespace(GroupOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case LIST_INDEXES:
            objectFieldMapAdd(ListIndexesOperation.class);
            busMongoNamespace = (MongoNamespace) objectFieldMap.get(ListIndexesOperation.class).get(args[0]);
            setReadPtMongoNamespace(ListIndexesOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case MAP_REDUCE_WITH_INLINE:
            busMongoNamespace = ((MapReduceWithInlineResultsOperation) args[0]).getNamespace();
            setReadPtMongoNamespace(MapReduceWithInlineResultsOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case PARALLEL_COLLECTION_SCAN:
            objectFieldMapAdd(ParallelCollectionScanOperation.class);
            busMongoNamespace = (MongoNamespace) objectFieldMap.get(ParallelCollectionScanOperation.class).get(args[0]);
            setReadPtMongoNamespace(ParallelCollectionScanOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case MIXED_BULK_WRITE:
            objectFieldMapAdd(MixedBulkWriteOperation.class);
            busMongoNamespace = ((MixedBulkWriteOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(MixedBulkWriteOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case BASE_WRITE:
            objectFieldMapAdd(BaseWriteOperation.class);
            busMongoNamespace = ((BaseWriteOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(BaseWriteOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case FIND_AND_DELETE:
            objectFieldMapAdd(FindAndDeleteOperation.class);
            busMongoNamespace = ((FindAndDeleteOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(FindAndDeleteOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case FIND_AND_REPLACE:
            objectFieldMapAdd(FindAndReplaceOperation.class);
            busMongoNamespace = ((FindAndReplaceOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(FindAndReplaceOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case FIND_AND_UPDATE:
            objectFieldMapAdd(FindAndUpdateOperation.class);
            busMongoNamespace = ((FindAndUpdateOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(FindAndUpdateOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case MAP_REDUCE_TO_COLLECTION:
            objectFieldMapAdd(MapReduceToCollectionOperation.class);
            busMongoNamespace = ((MapReduceToCollectionOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(MapReduceToCollectionOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case INSERT_TO_COLLECTION:
            objectFieldMapAdd(InsertOperation.class);
            busMongoNamespace = ((InsertOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(InsertOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case UPDATE_OPERATION:
            objectFieldMapAdd(UpdateOperation.class);
            busMongoNamespace = ((UpdateOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(UpdateOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case DELETE_OPERATION:
            objectFieldMapAdd(DeleteOperation.class);
            busMongoNamespace = ((DeleteOperation) args[0]).getNamespace();
            setWritePtMongoNamespace(DeleteOperation.class, args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        default:
            LOGGER.error("not support operation class is {} ", args[0].getClass().getName());
            throw new PressureMeasureError("[3]mongo not support pressure operation class is " + args[0].getClass().getName());
    }
    return advice.getParameterArray();
}
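The interceptor rewrites each operation's MongoNamespace to point at shadow data before the driver executes it. The helpers setReadPtMongoNamespace/setWritePtMongoNamespace are not shown here, so the sketch below only illustrates the general idea under stated assumptions: a shadow-database config swaps the database name, a shadow-table config prefixes the collection name; the "pt_" prefix, the ShadowMapping type and the toShadowNamespace helper are hypothetical, and plain strings are used instead of the driver's MongoNamespace to keep the sketch self-contained.

public class ShadowNamespaceSketch {

    // Hypothetical mirror of ShadowDatabaseConfig: either a full shadow database or per-table prefixing.
    static class ShadowMapping {
        final boolean shadowDatabase;
        final String shadowDatabaseName;
        ShadowMapping(boolean shadowDatabase, String shadowDatabaseName) {
            this.shadowDatabase = shadowDatabase;
            this.shadowDatabaseName = shadowDatabaseName;
        }
    }

    // Returns "database.collection" rewritten to its shadow counterpart (assumed "pt_" prefix).
    static String toShadowNamespace(String database, String collection, ShadowMapping mapping) {
        if (mapping.shadowDatabase) {
            // Shadow database: keep the collection, swap the database.
            return mapping.shadowDatabaseName + "." + collection;
        }
        // Shadow table: keep the database, prefix the collection.
        return database + ".pt_" + collection;
    }

    public static void main(String[] args) {
        System.out.println(toShadowNamespace("shop", "orders", new ShadowMapping(true, "shop_pt"))); // shop_pt.orders
        System.out.println(toShadowNamespace("shop", "orders", new ShadowMapping(false, null)));     // shop.pt_orders
    }
}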
Use of com.pamirs.pradar.exception.PressureMeasureError in project LinkAgent by shulieTech.
The class SyncDelegateOperationExecutorInterceptor, method getParameter0.
@Override
public Object[] getParameter0(Advice advice) throws Throwable {
    if (!Pradar.isClusterTest()) {
        return advice.getParameterArray();
    }
    Object[] args = advice.getParameterArray();
    Integer operationNum = operationNumMap.get(args[0].getClass().getSimpleName());
    if (operationNum == null) {
        LOGGER.error("not support operation class is {} ", args[0].getClass().getName());
        throw new PressureMeasureError("[4]mongo not support pressure operation class is " + args[0].getClass().getName());
    }
    if (mongoClientDelegate == null) {
        Field field = null;
        try {
            field = advice.getTarget().getClass().getDeclaredField("this$0");
            field.setAccessible(true);
            mongoClientDelegate = (MongoClientDelegate) field.get(advice.getTarget());
        } catch (Throwable e) {
            // Pass the throwable as the last argument without a placeholder so the stack trace is logged.
            LOGGER.error("DelegateOperationExecutorInterceptor error", e);
        } finally {
            if (field != null) {
                field.setAccessible(false);
            }
        }
    }
    ClusterSettings clusterSettings = (ClusterSettings) ReflectionUtils.getFieldValue(
            ReflectionUtils.getFieldValue(mongoClientDelegate, "cluster"), "settings");
    List<ServerAddress> serverAddresses = clusterSettings.getHosts();
    ShadowDatabaseConfig shadowDatabaseConfig = null;
    for (ServerAddress serverAddress : serverAddresses) {
        shadowDatabaseConfig = GlobalConfig.getInstance().getShadowDatabaseConfig(serverAddress.toString());
        if (shadowDatabaseConfig != null) {
            break;
        }
    }
    final Field field = objectFieldMap.get(args[0].getClass());
    if (field == null) {
        final Field namespace = ReflectionUtils.getDeclaredField(args[0], "namespace");
        namespace.setAccessible(Boolean.TRUE);
        objectFieldMap.put(args[0].getClass(), namespace);
    }
    MongoNamespace busMongoNamespace = (MongoNamespace) objectFieldMap.get(args[0].getClass()).get(args[0]);
    switch (operationNum) {
        case 1:
            setReadPtMongoNamespace(args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        case 2:
            setWritePtMongoNamespace(args[0], busMongoNamespace, shadowDatabaseConfig);
            break;
        default:
            LOGGER.error("not support operation class is {} ", args[0].getClass().getName());
            throw new PressureMeasureError("[5]mongo not support pressure operation class is " + args[0].getClass().getName());
    }
    return advice.getParameterArray();
}
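Here the operation is categorized by its class simple name rather than its Class object, and anything outside the map is rejected with a PressureMeasureError. A minimal sketch of that dispatch follows; the map contents, the READ/WRITE constants and the dispatch helper are assumptions for illustration, and IllegalStateException stands in for PressureMeasureError so the sketch needs no agent dependency.

import java.util.HashMap;
import java.util.Map;

public class OperationDispatchSketch {

    private static final int READ = 1;
    private static final int WRITE = 2;

    // Hypothetical subset of operationNumMap: simple class name -> read/write category.
    private static final Map<String, Integer> OPERATION_NUM_MAP = new HashMap<String, Integer>();
    static {
        OPERATION_NUM_MAP.put("FindOperation", READ);
        OPERATION_NUM_MAP.put("CountOperation", READ);
        OPERATION_NUM_MAP.put("MixedBulkWriteOperation", WRITE);
        OPERATION_NUM_MAP.put("DeleteOperation", WRITE);
    }

    static String dispatch(Object operation) {
        Integer num = OPERATION_NUM_MAP.get(operation.getClass().getSimpleName());
        if (num == null) {
            // The interceptor above throws PressureMeasureError at this point.
            throw new IllegalStateException("mongo operation not supported: " + operation.getClass().getName());
        }
        return num == READ ? "rewrite read namespace" : "rewrite write namespace";
    }

    public static void main(String[] args) {
        try {
            dispatch("not a mongo operation");
        } catch (IllegalStateException expected) {
            System.out.println(expected.getMessage());
        }
    }
}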
Use of com.pamirs.pradar.exception.PressureMeasureError in project LinkAgent by shulieTech.
The class AbstractDBCollectionInterceptor, method getBusMongoClient.
private Mongo getBusMongoClient(Advice advice) {
    Field field = null;
    Field field1 = null;
    try {
        field = advice.getTarget().getClass().getDeclaredField("executor");
        field.setAccessible(true);
        Object object = field.get(advice.getTarget());
        field1 = object.getClass().getDeclaredField("this$0");
        field1.setAccessible(true);
        return (Mongo) field1.get(object);
    } catch (Exception e) {
        LOGGER.error("getBusMongoClient error ", e);
        throw new PressureMeasureError(e.getMessage());
    } finally {
        if (field != null) {
            field.setAccessible(false);
        }
        if (field1 != null) {
            field1.setAccessible(false);
        }
    }
}
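Both getBusMongoClient and the interceptor above rely on the same reflective pattern: read a private or synthetic field (such as the "this$0" outer-instance reference), restore accessibility in a finally block, and wrap failures in a runtime error. A self-contained sketch of that pattern follows; the readField helper name is mine, and RuntimeException stands in for PressureMeasureError.

import java.lang.reflect.Field;

public class ReflectiveFieldSketch {

    // Reads a declared (possibly private or synthetic) field and restores accessibility afterwards,
    // mirroring the pattern used by getBusMongoClient above.
    static Object readField(Object target, String fieldName) {
        Field field = null;
        try {
            field = target.getClass().getDeclaredField(fieldName);
            field.setAccessible(true);
            return field.get(target);
        } catch (Exception e) {
            // The interceptors above wrap this in PressureMeasureError.
            throw new RuntimeException("unable to read field " + fieldName, e);
        } finally {
            if (field != null) {
                field.setAccessible(false);
            }
        }
    }

    public static void main(String[] args) {
        class Holder {
            private final String secret = "shadow";
        }
        System.out.println(readField(new Holder(), "secret")); // shadow
    }
}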
Use of com.pamirs.pradar.exception.PressureMeasureError in project LinkAgent by shulieTech.
The class AbstractDBCollectionInterceptor, method getPtCollection.
protected DBCollection getPtCollection(DBCollection bizDbCollection, Advice advice) throws Throwable {
    String busCollectionName = getCollectionName(bizDbCollection);
    if ("$cmd".equals(busCollectionName)) {
        return null;
    }
    if (Pradar.isClusterTestPrefix(busCollectionName) || Pradar.isClusterTestPrefix(bizDbCollection.getDB().getName())) {
        return null;
    }
    if (StringUtils.isBlank(busCollectionName)) {
        // "mongo压测请求获取业务collection异常" -- failed to resolve the business collection for a mongo pressure-test request
        throw new PressureMeasureError("mongo压测请求获取业务collection异常");
    }
    DBCollection ptCollection = collectionMapping.get(busCollectionName);
    if (ptCollection == null) {
        synchronized (lock) {
            // Re-read inside the lock so only one thread builds the shadow collection.
            ptCollection = collectionMapping.get(busCollectionName);
            if (ptCollection == null) {
                ShadowDatabaseConfig shadowDatabaseConfig = getShadowDatabaseConfig(bizDbCollection);
                if (shadowDatabaseConfig == null) {
                    if (isRead()) {
                        // Read operation with no shadow table configured: read the business table directly
                        return null;
                    } else {
                        // "mongodb影子库/表未配置" -- no shadow database/table is configured for mongodb
                        ErrorReporter.buildError()
                                .setErrorType(ErrorTypeEnum.DataSource)
                                .setErrorCode("datasource-0002")
                                .setMessage("mongodb影子库/表未配置!")
                                .setDetail("业务库配置:::url: " + bizDbCollection.getDB().getMongo().getAddress().toString())
                                .report();
                        throw new PressureMeasureError("mongodb影子库/表未配置");
                    }
                }
                if (shadowDatabaseConfig.isShadowDatabase()) {
                    ptCollection = doShadowDatabase(bizDbCollection, busCollectionName, shadowDatabaseConfig, advice);
                } else {
                    ptCollection = doShadowTable(bizDbCollection, busCollectionName, shadowDatabaseConfig);
                }
                if (ptCollection != null) {
                    ptCollection.setWriteConcern(bizDbCollection.getWriteConcern());
                    ptCollection.setDBDecoderFactory(bizDbCollection.getDBDecoderFactory());
                    ptCollection.setDBEncoderFactory(bizDbCollection.getDBEncoderFactory());
                    ptCollection.setObjectClass(bizDbCollection.getObjectClass());
                    ptCollection.setReadPreference(bizDbCollection.getReadPreference());
                    ptCollection.setOptions(bizDbCollection.getOptions());
                    collectionMapping.put(busCollectionName, ptCollection);
                }
            }
        }
    }
    return ptCollection;
}
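getPtCollection caches one shadow collection per business collection name using a check-lock-check pattern: read the cache, and only under the lock re-read and build the missing entry. The sketch below shows that caching shape in isolation; the map and lock types and the "pt_" prefix are assumptions, and plain strings stand in for DBCollection and for doShadowTable/doShadowDatabase.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ShadowCollectionCacheSketch {

    private final Map<String, String> collectionMapping = new ConcurrentHashMap<String, String>();
    private final Object lock = new Object();

    // Check-lock-check caching: only one thread builds the shadow collection for a given
    // business collection; later callers get the cached instance without taking the lock.
    String getPtCollection(String busCollectionName) {
        String ptCollection = collectionMapping.get(busCollectionName);
        if (ptCollection == null) {
            synchronized (lock) {
                ptCollection = collectionMapping.get(busCollectionName);
                if (ptCollection == null) {
                    ptCollection = "pt_" + busCollectionName; // stands in for doShadowTable / doShadowDatabase
                    collectionMapping.put(busCollectionName, ptCollection);
                }
            }
        }
        return ptCollection;
    }

    public static void main(String[] args) {
        ShadowCollectionCacheSketch cache = new ShadowCollectionCacheSketch();
        System.out.println(cache.getPtCollection("orders")); // pt_orders
    }
}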