mirror of https://github.com/apache/cloudstack.git
Compare commits
12 Commits
eb8685e803 ... ca030bb540
| Author | SHA1 | Date |
|---|---|---|
| | ca030bb540 | |
| | bce3e54a7e | |
| | 6a9835904c | |
| | 6846619a6f | |
| | d1eb2822d9 | |
| | 43345d4ab8 | |
| | 238d07276f | |
| | a52eadc964 | |
| | a713305e21 | |
| | 1720ebb23d | |
| | d32ca11747 | |
| | 3b70818567 | |
```diff
@@ -188,6 +188,7 @@ environmnet
 equivalant
 erro
 erronous
+errorprone
 everthing
 everytime
 excetion
```
```diff
@@ -0,0 +1,122 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: Error Prone Analysis
+
+on:
+  push:
+    branches: [ main, add-errorprone ]
+  pull_request:
+    branches: [ main, '4.20' ]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+jobs:
+  errorprone:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up JDK 11
+        uses: actions/setup-java@v4
+        with:
+          java-version: '11'
+          distribution: 'adopt'
+          architecture: x64
+          cache: maven
+
+      - name: Run Error Prone Static Analysis (Strict Mode)
+        id: errorprone
+        run: |
+          echo "::group::Error Prone Analysis"
+          # Temporarily remove -XepAllErrorsAsWarnings to run in strict mode
+          sed -i 's/-Xplugin:ErrorProne -XepAllErrorsAsWarnings/-Xplugin:ErrorProne/g' pom.xml
+
+          set -o pipefail
+
+          # Use -fae (fail-at-end) to build all modules and report failures at the end
+          # Run 'test' phase to compile and test all modules
+          mvn -fae clean test -T$(nproc) 2>&1 | tee errorprone.log
+          MVN_EXIT=${PIPESTATUS[0]}
+
+          echo "mvn_exit=${MVN_EXIT}" >> $GITHUB_OUTPUT
+          echo "::endgroup::"
+
+          exit 0
+        continue-on-error: true
+
+      - name: Check for Error Prone Issues
+        id: check-errors
+        run: |
+          HAS_ERRORS=false
+
+          if [ "${{ steps.errorprone.outputs.mvn_exit }}" != "0" ]; then
+            HAS_ERRORS=true
+            echo "Maven build exited with code ${{ steps.errorprone.outputs.mvn_exit }}"
+          fi
+
+          if grep -q "error: \[" errorprone.log; then
+            HAS_ERRORS=true
+          fi
+
+          if grep -q "^\[ERROR\]" errorprone.log; then
+            HAS_ERRORS=true
+          fi
+
+          if [ "$HAS_ERRORS" = "true" ]; then
+            echo "has_errors=true" >> $GITHUB_OUTPUT
+            echo "::error::Error Prone and/or compilation issues found in the code"
+            echo ""
+            echo "=== Error Prone Issues ==="
+            grep -n "error: \[" errorprone.log | head -50 || echo "No Error Prone specific issues"
+            echo ""
+            echo "=== Maven [ERROR] Lines ==="
+            grep -n "^\[ERROR\]" errorprone.log | head -50 || echo "No Maven errors"
+            echo ""
+
+            echo "## ⚠️ Error Prone Analysis Failed" >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "Error Prone static analysis and/or compilation detected issues in this PR." >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "### Error Prone Issues (first 50):" >> $GITHUB_STEP_SUMMARY
+            echo '```' >> $GITHUB_STEP_SUMMARY
+            grep -n "error: \[" errorprone.log | head -50 >> $GITHUB_STEP_SUMMARY || echo "None" >> $GITHUB_STEP_SUMMARY
+            echo '```' >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "### Maven Compilation Errors (first 50):" >> $GITHUB_STEP_SUMMARY
+            echo '```' >> $GITHUB_STEP_SUMMARY
+            grep -n "^\[ERROR\]" errorprone.log | head -50 >> $GITHUB_STEP_SUMMARY || echo "None" >> $GITHUB_STEP_SUMMARY
+            echo '```' >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "See the [Error Prone documentation](https://errorprone.info/) for details on each bug pattern." >> $GITHUB_STEP_SUMMARY
+          else
+            echo "has_errors=false" >> $GITHUB_OUTPUT
+            echo "✅ No Error Prone issues found"
+
+            echo "## ✅ Error Prone Analysis Passed" >> $GITHUB_STEP_SUMMARY
+            echo "" >> $GITHUB_STEP_SUMMARY
+            echo "No issues detected by Error Prone static analysis." >> $GITHUB_STEP_SUMMARY
+          fi
+
+      - name: Fail if errors found
+        if: steps.check-errors.outputs.has_errors == 'true'
+        run: exit 1
```
```diff
@@ -78,6 +78,7 @@ public class UpdateNetworkOfferingCmd extends BaseCmd {
 
     @Parameter(name = ApiConstants.DOMAIN_ID,
             type = CommandType.STRING,
+            length = 4096,
             description = "The ID of the containing domain(s) as comma separated string, public for public offerings")
     private String domainIds;
 
```
```diff
@@ -75,6 +75,7 @@ public class UpdateDiskOfferingCmd extends BaseCmd {
     @Parameter(name = ApiConstants.ZONE_ID,
             type = CommandType.STRING,
             description = "The ID of the containing zone(s) as comma separated string, all for all zones offerings",
+            length = 4096,
             since = "4.13")
     private String zoneIds;
 
```
```diff
@@ -69,6 +69,7 @@ public class UpdateServiceOfferingCmd extends BaseCmd {
     @Parameter(name = ApiConstants.ZONE_ID,
             type = CommandType.STRING,
             description = "The ID of the containing zone(s) as comma separated string, all for all zones offerings",
+            length = 4096,
             since = "4.13")
     private String zoneIds;
 
```
```diff
@@ -65,6 +65,7 @@ public class UpdateVPCOfferingCmd extends BaseAsyncCmd {
     @Parameter(name = ApiConstants.ZONE_ID,
             type = CommandType.STRING,
             description = "The ID of the containing zone(s) as comma separated string, all for all zones offerings",
+            length = 4096,
             since = "4.13")
     private String zoneIds;
 
```
```diff
@@ -31,4 +31,6 @@ public interface VMScheduledJobDao extends GenericDao<VMScheduledJobVO, Long> {
     int expungeJobsForSchedules(List<Long> scheduleId, Date dateAfter);
 
     int expungeJobsBefore(Date currentTimestamp);
+
+    VMScheduledJobVO findByScheduleAndTimestamp(long scheduleId, Date scheduledTimestamp);
 }
```
```diff
@@ -39,6 +39,8 @@ public class VMScheduledJobDaoImpl extends GenericDaoBase<VMScheduledJobVO, Long
 
     private final SearchBuilder<VMScheduledJobVO> expungeJobForScheduleSearch;
 
+    private final SearchBuilder<VMScheduledJobVO> scheduleAndTimestampSearch;
+
     static final String SCHEDULED_TIMESTAMP = "scheduled_timestamp";
 
     static final String VM_SCHEDULE_ID = "vm_schedule_id";
@@ -58,6 +60,11 @@ public class VMScheduledJobDaoImpl extends GenericDaoBase<VMScheduledJobVO, Long
         expungeJobForScheduleSearch.and(VM_SCHEDULE_ID, expungeJobForScheduleSearch.entity().getVmScheduleId(), SearchCriteria.Op.IN);
         expungeJobForScheduleSearch.and(SCHEDULED_TIMESTAMP, expungeJobForScheduleSearch.entity().getScheduledTime(), SearchCriteria.Op.GTEQ);
         expungeJobForScheduleSearch.done();
+
+        scheduleAndTimestampSearch = createSearchBuilder();
+        scheduleAndTimestampSearch.and(VM_SCHEDULE_ID, scheduleAndTimestampSearch.entity().getVmScheduleId(), SearchCriteria.Op.EQ);
+        scheduleAndTimestampSearch.and(SCHEDULED_TIMESTAMP, scheduleAndTimestampSearch.entity().getScheduledTime(), SearchCriteria.Op.EQ);
+        scheduleAndTimestampSearch.done();
     }
 
     /**
@@ -92,4 +99,12 @@ public class VMScheduledJobDaoImpl extends GenericDaoBase<VMScheduledJobVO, Long
         sc.setParameters(SCHEDULED_TIMESTAMP, date);
         return expunge(sc);
     }
+
+    @Override
+    public VMScheduledJobVO findByScheduleAndTimestamp(long scheduleId, Date scheduledTimestamp) {
+        SearchCriteria<VMScheduledJobVO> sc = scheduleAndTimestampSearch.create();
+        sc.setParameters(VM_SCHEDULE_ID, scheduleId);
+        sc.setParameters(SCHEDULED_TIMESTAMP, scheduledTimestamp);
+        return findOneBy(sc);
+    }
 }
```
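Taken together, the interface and DAO changes add a lookup keyed on (vm_schedule_id, scheduled_timestamp) so a caller can detect an already-persisted job before inserting another one; the real caller change appears in the VMSchedulerImpl hunk further down. The sketch below is illustration only: the helper class and method names and the `VMScheduleVO` parameter type are assumptions, while the DAO methods and the `VMScheduledJobVO` constructor match the diffs in this compare.

```java
import java.util.Date;

// Sketch only: class, constructor injection and method name are assumed here;
// the DAO calls and the VMScheduledJobVO constructor are taken from the diffs.
public class VmScheduledJobHelper {

    private final VMScheduledJobDao vmScheduledJobDao;

    public VmScheduledJobHelper(VMScheduledJobDao vmScheduledJobDao) {
        this.vmScheduledJobDao = vmScheduledJobDao;
    }

    /** Persist a job for the given schedule and timestamp unless one already exists. */
    public Date scheduleJobIfAbsent(VMScheduleVO vmSchedule, Date scheduledDateTime) {
        // Look for an existing job for this schedule at this exact timestamp.
        VMScheduledJobVO existing = vmScheduledJobDao.findByScheduleAndTimestamp(vmSchedule.getId(), scheduledDateTime);
        if (existing != null) {
            // Already queued for this slot; do not persist a duplicate.
            return scheduledDateTime;
        }
        VMScheduledJobVO job = new VMScheduledJobVO(vmSchedule.getVmId(), vmSchedule.getId(), vmSchedule.getAction(), scheduledDateTime);
        vmScheduledJobDao.persist(job);
        return scheduledDateTime;
    }
}
```

Note that check-then-persist is best-effort de-duplication rather than a hard guarantee: two schedulers racing between the lookup and the insert could still create duplicates unless the table also enforces uniqueness on that pair.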
pom.xml (17 changed lines)

```diff
@@ -80,6 +80,7 @@
         <cs.surefire-plugin.version>2.22.2</cs.surefire-plugin.version>
         <cs.clover-maven-plugin.version>4.4.1</cs.clover-maven-plugin.version>
         <cs.exec-maven-plugin.version>3.2.0</cs.exec-maven-plugin.version>
+        <cs.errorprone.version>2.24.1</cs.errorprone.version>
 
         <!-- Logging versions -->
         <cs.log4j.version>2.19.0</cs.log4j.version>
@@ -1094,15 +1095,25 @@
                     <configuration>
                         <source>${cs.jdk.version}</source>
                         <target>${cs.jdk.version}</target>
-                        <fork>true</fork>
-                        <meminitial>128m</meminitial>
-                        <maxmem>512m</maxmem>
+                        <encoding>UTF-8</encoding>
                         <compilerArgs>
                             <arg>-XDignore.symbol.file=true</arg>
                             <arg>--add-opens=java.base/java.lang=ALL-UNNAMED</arg>
                             <arg>--add-exports=java.base/sun.security.x509=ALL-UNNAMED</arg>
                             <arg>--add-exports=java.base/sun.security.provider=ALL-UNNAMED</arg>
+                            <arg>-XDcompilePolicy=simple</arg>
+                            <arg>-Xplugin:ErrorProne -XepAllErrorsAsWarnings</arg>
                         </compilerArgs>
+                        <annotationProcessorPaths>
+                            <path>
+                                <groupId>com.google.errorprone</groupId>
+                                <artifactId>error_prone_core</artifactId>
+                                <version>${cs.errorprone.version}</version>
+                            </path>
+                        </annotationProcessorPaths>
+                        <fork>true</fork>
+                        <meminitial>128m</meminitial>
+                        <maxmem>512m</maxmem>
                     </configuration>
                 </plugin>
                 <plugin>
```
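The compiler configuration above keeps `-XepAllErrorsAsWarnings` enabled, so Error Prone findings surface only as warnings in regular builds, while the CI workflow earlier in this compare strips that flag to run in strict mode, where error-severity checks fail compilation. As a hedged illustration only (the class below is made up, not taken from the codebase), this is roughly what an Error Prone finding, its fix, and the per-check `@SuppressWarnings` escape hatch look like:

```java
import java.util.Arrays;

// Hypothetical snippet (not from the CloudStack codebase) showing an Error Prone
// finding and how it can be fixed or suppressed by check name.
public class ChecksumComparator {

    // Flagged by Error Prone's ArrayEquals bug pattern: Object.equals on arrays
    // compares references, not contents. Once -XepAllErrorsAsWarnings is removed
    // (the CI strict-mode job), error-severity findings like this fail the build.
    static boolean sameDigestBuggy(byte[] a, byte[] b) {
        return a.equals(b);
    }

    // Fix: compare the array contents.
    static boolean sameDigest(byte[] a, byte[] b) {
        return Arrays.equals(a, b);
    }

    // Error Prone honors @SuppressWarnings with the check name for findings
    // that have been reviewed and are intentional.
    @SuppressWarnings("ArrayEquals")
    static boolean sameDigestIntentional(byte[] a, byte[] b) {
        return a.equals(b);
    }
}
```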
```diff
@@ -162,7 +162,13 @@ public class VMSchedulerImpl extends ManagerBase implements VMScheduler, Configu
         }
 
         Date scheduledDateTime = Date.from(ts.toInstant());
-        VMScheduledJobVO scheduledJob = new VMScheduledJobVO(vmSchedule.getVmId(), vmSchedule.getId(), vmSchedule.getAction(), scheduledDateTime);
+        VMScheduledJobVO scheduledJob = vmScheduledJobDao.findByScheduleAndTimestamp(vmSchedule.getId(), scheduledDateTime);
+        if (scheduledJob != null) {
+            logger.trace("Job is already scheduled for schedule {} at {}", vmSchedule, scheduledDateTime);
+            return scheduledDateTime;
+        }
+
+        scheduledJob = new VMScheduledJobVO(vmSchedule.getVmId(), vmSchedule.getId(), vmSchedule.getAction(), scheduledDateTime);
         try {
             vmScheduledJobDao.persist(scheduledJob);
             ActionEventUtils.onScheduledActionEvent(User.UID_SYSTEM, vm.getAccountId(), actionEventMap.get(vmSchedule.getAction()),
```
```diff
@@ -218,18 +218,19 @@ export const notifierPlugin = {
         if (error.response.status) {
           msg = `${i18n.global.t('message.request.failed')} (${error.response.status})`
         }
-        if (error.message) {
-          desc = error.message
-        }
-        if (error.response.headers && 'x-description' in error.response.headers) {
+        if (error.response.headers?.['x-description']) {
           desc = error.response.headers['x-description']
-        }
-        if (desc === '' && error.response.data) {
+        } else if (error.response.data) {
           const responseKey = _.findKey(error.response.data, 'errortext')
           if (responseKey) {
             desc = error.response.data[responseKey].errortext
+          } else if (typeof error.response.data === 'string') {
+            desc = error.response.data
           }
         }
+        if (!desc && error.message) {
+          desc = error.message
+        }
       }
       let countNotify = store.getters.countNotify
       countNotify++
```
```diff
@@ -638,11 +638,7 @@ export default {
         this.$emit('refresh-data')
         this.closeAction()
       }).catch(e => {
-        this.$notification.error({
-          message: this.$t('message.upload.failed'),
-          description: `${this.$t('message.upload.template.failed.description')} - ${e}`,
-          duration: 0
-        })
+        this.$notifyError(e)
       })
     },
     fetchCustomHypervisorName () {
```