我有一个jsf项目,它以前已经在oracle application server上部署过多次,它使用eclipse link 2.2.0,在最近的一个新安装中,站点在从DB加载数据时崩溃,因为它不停地执行相同的查询上千次,然后抛出stackoverflow异常
public ClassDescriptor buildTLEntityPCDAODescriptor() {
RelationalDescriptor descriptor = new RelationalDescriptor();
descriptor.setJavaClass(com.its.tabs.tpc.layers.dal.entity.toplink.TLEntityPCDAO.class);
descriptor.addTableName("TPC_ENTITIES");
descriptor.addPrimaryKeyFieldName("TPC_ENTITIES.ENTITY_ID");
// Inheritance Properties.
// Descriptor Properties.
descriptor.useSoftCacheWeakIdentityMap();
descriptor.setIdentityMapSize(100);
descriptor.useRemoteSoftCacheWeakIdentityMap();
descriptor.setRemoteIdentityMapSize(100);
descriptor.setSequenceNumberFieldName("TPC_ENTITIES.ENTITY_ID");
descriptor.setSequenceNumberName("SEQ_ENTITIES");
descriptor.setAlias("TLEntityPCDAO");
// Cache Invalidation Policy
TimeToLiveCacheInvalidationPolicy policy = new TimeToLiveCacheInvalidationPolicy(1);
policy.setShouldUpdateReadTimeOnUpdate(false);
descriptor.setCacheInvalidationPolicy(policy);
// Query Manager.
descriptor.getQueryManager().checkCacheForDoesExist();
DirectToFieldMapping descriptionMapping = new DirectToFieldMapping();
descriptionMapping.setAttributeName("description");
descriptionMapping.setFieldName("TPC_ENTITIES.DESCRIPTION");
descriptor.addMapping(descriptionMapping);
DirectToFieldMapping serviceProviderCodeMapping = new DirectToFieldMapping();
serviceProviderCodeMapping.setAttributeName("serviceProviderCode");
serviceProviderCodeMapping.setFieldName("TPC_ENTITIES.SERVICE_PROVIDER_CODE");
descriptor.addMapping(serviceProviderCodeMapping);
DirectToFieldMapping entity_idMapping = new DirectToFieldMapping();
entity_idMapping.setAttributeName("entity_id");
entity_idMapping.setNullValue(new Long(0));
entity_idMapping.setFieldName("TPC_ENTITIES.ENTITY_ID");
descriptor.addMapping(entity_idMapping);
DirectToFieldMapping idMapping = new DirectToFieldMapping();
idMapping.setAttributeName("id");
idMapping.readOnly();
idMapping.setFieldName("TPC_ENTITIES.ENTITY_ID");
descriptor.addMapping(idMapping);
DirectToFieldMapping levelIdMapping = new DirectToFieldMapping();
levelIdMapping.setAttributeName("levelId");
levelIdMapping.setNullValue(new Long(0)); //changed from -1 to 0
levelIdMapping.setFieldName("TPC_ENTITIES.LEVEL_ID");
descriptor.addMapping(levelIdMapping);
DirectToFieldMapping lookupIdMapping = new DirectToFieldMapping();
lookupIdMapping.setAttributeName("lookupId");
lookupIdMapping.setNullValue(new Long(0)); //changed from -1 to 0
lookupIdMapping.setFieldName("TPC_ENTITIES.LOOKUP_ID");
descriptor.addMapping(lookupIdMapping);
DirectToFieldMapping levelDepthMapping = new DirectToFieldMapping();
levelDepthMapping.setAttributeName("levelDepth");
levelDepthMapping.setFieldName("TPC_ENTITIES.LEVEL_DEPTH");
descriptor.addMapping(levelDepthMapping);
DirectToFieldMapping entity_sequenceMapping = new DirectToFieldMapping();
entity_sequenceMapping.setAttributeName("entity_sequence");
entity_sequenceMapping.setNullValue(new Long(0));
entity_sequenceMapping.setFieldName("TPC_ENTITIES.ENTITY_SEQUENCE");
descriptor.addMapping(entity_sequenceMapping);
DirectToFieldMapping entity_typeMapping = new DirectToFieldMapping();
entity_typeMapping.setAttributeName("entity_type");
entity_typeMapping.setFieldName("TPC_ENTITIES.ENTITY_TYPE");
descriptor.addMapping(entity_typeMapping);
DirectToFieldMapping nameMapping = new DirectToFieldMapping();
nameMapping.setAttributeName("name");
nameMapping.setFieldName("TPC_ENTITIES.NAME");
descriptor.addMapping(nameMapping);
DirectToFieldMapping parent_entity_idMapping = new DirectToFieldMapping();
parent_entity_idMapping.setAttributeName("parent_entity_id");
parent_entity_idMapping.setNullValue(new Long(0)); //changed from -1 to 0
parent_entity_idMapping.setFieldName("TPC_ENTITIES.PARENT_ENTITY_ID");
descriptor.addMapping(parent_entity_idMapping);
AggregateObjectMapping auditFieldsMapping = new AggregateObjectMapping();
auditFieldsMapping.setAttributeName("auditFields");
auditFieldsMapping.setReferenceClass(com.its.tabs.architecture.dal.persistency.AuditFields.class);
auditFieldsMapping.setIsNullAllowed(false);
auditFieldsMapping.addFieldNameTranslation("TPC_ENTITIES.CREATED_BY", "createdBy->DIRECT");
auditFieldsMapping.addFieldNameTranslation("TPC_ENTITIES.MODIFIED_BY", "modifiedBy->DIRECT");
auditFieldsMapping.addFieldNameTranslation("TPC_ENTITIES.CREATED_DATE", "createdDate->DIRECT");
auditFieldsMapping.addFieldNameTranslation("TPC_ENTITIES.MODIFIED_DATE", "modifiedDate->DIRECT");
descriptor.addMapping(auditFieldsMapping);
OneToManyMapping entitesMapping = new OneToManyMapping();
entitesMapping.setAttributeName("entites");
entitesMapping.setReferenceClass(com.its.tabs.tpc.layers.dal.entity.toplink.TLEntityPCDAO.class);
entitesMapping.useTransparentCollection();
entitesMapping.useCollectionClass(org.eclipse.persistence.indirection.IndirectList.class);
entitesMapping.addTargetForeignKeyFieldName("TPC_ENTITIES.PARENT_ENTITY_ID", "TPC_ENTITIES.ENTITY_ID");
descriptor.addMapping(entitesMapping);
OneToManyMapping entityattributesMapping = new OneToManyMapping();
entityattributesMapping.setAttributeName("entityattributes");
entityattributesMapping.setReferenceClass(com.its.tabs.tpc.layers.dal.entityattributes.toplink.TLEntityAttributesDAO.class);
entityattributesMapping.useTransparentCollection();
entityattributesMapping.useCollectionClass(org.eclipse.persistence.indirection.IndirectList.class);
entityattributesMapping.addTargetForeignKeyFieldName("TPC_ENTITY_ATTRIBUTES.ENTITY_ID", "TPC_ENTITIES.ENTITY_ID");
descriptor.addMapping(entityattributesMapping);
OneToOneMapping entityLevelIdMapping = new OneToOneMapping();
entityLevelIdMapping.setAttributeName("levels");
entityLevelIdMapping.setReferenceClass(TLLevelsDAO.class);
entityLevelIdMapping.useProxyIndirection();
entityLevelIdMapping.readOnly();
entityLevelIdMapping.addForeignKeyFieldName("TPC_ENTITIES.LEVEL_ID", "TPC_LEVELS.LEVEL_ID");
descriptor.addMapping(entityLevelIdMapping);
OneToOneMapping entityLookupIdMapping = new OneToOneMapping();
entityLookupIdMapping.setAttributeName("lookups");
entityLookupIdMapping.setReferenceClass(TLLookupDAO.class);
entityLookupIdMapping.useProxyIndirection();
entityLookupIdMapping.readOnly();
entityLookupIdMapping.addForeignKeyFieldName("TPC_ENTITIES.LOOKUP_ID", "TPC_LOOKUP.LOOKUP_ID");
descriptor.addMapping(entityLookupIdMapping);
return descriptor;
}
List<EntityPCEntity> entityList = new ArrayList<EntityPCEntity>();
entityList = (List<EntityPCEntity>)this.getTABSEntityManagerFinder().getEclipseLinkAdapter().readMultipleDAOs(TLEntityPCDAO.class);
return entityList;
例外
如果您查看您的失效策略:
TimeToLiveCacheInvalidationPolicy policy = new TimeToLiveCacheInvalidationPolicy(1);
policy.setShouldUpdateReadTimeOnUpdate(false);
descriptor.setCacheInvalidationPolicy(policy);
您已经将它设置为在一毫秒内无效,可能在它完成读入之前。虽然我看不到更多关于对象模型或正在执行的查询的信息,但这可能是问题的一个很大的原因,因为这意味着对该对象的任何引用几乎总是无效的,并被迫再次查询它。
在我的STS应用程序中实现Apache Ivy后收到以下控制台错误。 我添加了以下jar文件:
我有一个疑问。我有一个表,其中有些记录重复日期时间。我必须把最后一个从日期时间重复的地方拿来。示例: 它应该返回ID3、4和7 你能帮我吗?谢谢!
我正在将一个项目从GF 3.1.2(Eclipselink2.3.2)迁移到Payara 4.1.1.162(Eclipselink2.6.2)。 在部署war存档时,我遇到以下异常:
问题内容: 没有人有一个优雅的sql语句来删除表中的重复记录,但前提是重复项的数量多于x个?因此,最多允许2或3个重复项,仅此而已? 当前,我有一条执行以下操作的select语句: 这很好用。但是现在,我只想删除那些重复超过2个的行。 谢谢 问题答案: with cte as ( select row_number() over (partition by dupcol1, dupcol2 ord
我想用不同的列值执行相同的SQL查询,即:我有一个如下表: 从上表中,我想获取员工组的和。请建议我如何在不执行多次的情况下进行查询以获取上述数据。
我已经编写了一个Web服务使用在Glass 在GlassFishe.我从数据源获取数据库连接,并将其传递给jOOQ。 在请求结束时(基于这个问题),我关闭与Apache DBUtils的连接: 问题是我只能发送一个请求。之后,我得到连接关闭异常。GlassFish中的连接池配置为: 我正在关闭物理连接吗?我正在使用MySQL 5.5。32戴软呢帽19戴玻璃鱼4建89。有人能帮忙吗? [更新] 我已