further refactored jobs

Alex Huang 2013-06-04 13:54:33 -07:00
parent 51f533e97a
commit dd11254087
36 changed files with 313 additions and 283 deletions

View File

@@ -809,10 +809,10 @@
<bean id="asyncJobJournalDaoImpl" class="org.apache.cloudstack.framework.jobs.dao.AsyncJobJournalDaoImpl" />
<bean id="asyncJobJoinMapDaoImpl" class="org.apache.cloudstack.framework.jobs.dao.AsyncJobJoinMapDaoImpl" />
<bean id="asyncJobManagerImpl" class="com.cloud.async.AsyncJobManagerImpl"/>
<bean id="asyncJobMonitor" class="org.apache.cloudstack.framework.jobs.AsyncJobMonitor"/>
<bean id="asyncJobMonitor" class="org.apache.cloudstack.framework.jobs.impl.AsyncJobMonitor"/>
<bean id="syncQueueDaoImpl" class="org.apache.cloudstack.framework.jobs.dao.SyncQueueDaoImpl" />
<bean id="syncQueueItemDaoImpl" class="org.apache.cloudstack.framework.jobs.dao.SyncQueueItemDaoImpl" />
<bean id="syncQueueManagerImpl" class="org.apache.cloudstack.framework.jobs.SyncQueueManagerImpl" />
<bean id="syncQueueManagerImpl" class="org.apache.cloudstack.framework.jobs.impl.SyncQueueManagerImpl" />
<bean id="ApiAsyncJobDispatcher" class="com.cloud.api.ApiAsyncJobDispatcher">
<property name="name" value="ApiAsyncJobDispatcher" />

View File

@@ -1,233 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.serializer;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import com.cloud.utils.DateUtil;
import com.cloud.utils.Pair;
import com.google.gson.Gson;
/**
* Note: toPairList and appendPairList only support simple POJO objects currently
*/
public class SerializerHelper {
public static final Logger s_logger = Logger.getLogger(SerializerHelper.class.getName());
public static String token = "/";
public static String toSerializedString(Object result) {
if(result != null) {
Class<?> clz = result.getClass();
Gson gson = GsonHelper.getGson();
return clz.getName() + token + gson.toJson(result);
}
return null;
}
public static Object fromSerializedString(String result) {
try {
if(result != null && !result.isEmpty()) {
String[] serializedParts = result.split(token);
if (serializedParts.length < 2) {
return null;
}
String clzName = serializedParts[0];
String nameField = null;
String content = null;
if (serializedParts.length == 2) {
content = serializedParts[1];
} else {
nameField = serializedParts[1];
int index = result.indexOf(token + nameField + token);
content = result.substring(index + nameField.length() + 2);
}
Class<?> clz;
try {
clz = Class.forName(clzName);
} catch (ClassNotFoundException e) {
return null;
}
Gson gson = GsonHelper.getGson();
Object obj = gson.fromJson(content, clz);
return obj;
}
return null;
} catch(RuntimeException e) {
s_logger.error("Caught runtime exception when doing GSON deserialization on: " + result);
throw e;
}
}
public static String toObjectSerializedString(Serializable object) {
assert(object != null);
ByteArrayOutputStream bs = new ByteArrayOutputStream();
try {
ObjectOutputStream os = new ObjectOutputStream(bs);
os.writeObject(object);
os.close();
bs.close();
return Base64.encodeBase64URLSafeString(bs.toByteArray());
} catch(IOException e) {
s_logger.error("Unexpected exception", e);
}
return null;
}
public static Object fromObjectSerializedString(String base64EncodedString) {
if(base64EncodedString == null)
return null;
byte[] content = Base64.decodeBase64(base64EncodedString);
ByteArrayInputStream bs = new ByteArrayInputStream(content);
try {
ObjectInputStream is = new ObjectInputStream(bs);
Object obj = is.readObject();
is.close();
bs.close();
return obj;
} catch(IOException e) {
s_logger.error("Unexpected exception", e);
} catch(ClassNotFoundException e) {
s_logger.error("Unexpected exception", e);
}
return null;
}
public static List<Pair<String, Object>> toPairList(Object o, String name) {
List<Pair<String, Object>> l = new ArrayList<Pair<String, Object>>();
return appendPairList(l, o, name);
}
public static List<Pair<String, Object>> appendPairList(List<Pair<String, Object>> l, Object o, String name) {
if(o != null) {
Class<?> clz = o.getClass();
if(clz.isPrimitive() || clz.getSuperclass() == Number.class || clz == String.class || clz == Date.class) {
l.add(new Pair<String, Object>(name, o.toString()));
return l;
}
for(Field f : clz.getDeclaredFields()) {
if((f.getModifiers() & Modifier.STATIC) != 0) {
continue;
}
Param param = f.getAnnotation(Param.class);
if(param == null) {
continue;
}
String propName = f.getName();
if(!param.propName().isEmpty()) {
propName = param.propName();
}
String paramName = param.name();
if(paramName.isEmpty()) {
paramName = propName;
}
Method method = getGetMethod(o, propName);
if(method != null) {
try {
Object fieldValue = method.invoke(o);
if(fieldValue != null) {
if (f.getType() == Date.class) {
l.add(new Pair<String, Object>(paramName, DateUtil.getOutputString((Date)fieldValue)));
} else {
l.add(new Pair<String, Object>(paramName, fieldValue.toString()));
}
}
//else
// l.add(new Pair<String, Object>(paramName, ""));
} catch (IllegalArgumentException e) {
s_logger.error("Illegal argument exception when calling POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (IllegalAccessException e) {
s_logger.error("Illegal access exception when calling POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (InvocationTargetException e) {
s_logger.error("Invocation target exception when calling POJO " + o.getClass().getName() + " get method for property: " + propName);
}
}
}
}
return l;
}
private static Method getGetMethod(Object o, String propName) {
Method method = null;
String methodName = getGetMethodName("get", propName);
try {
method = o.getClass().getMethod(methodName);
} catch (SecurityException e1) {
s_logger.error("Security exception in getting POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (NoSuchMethodException e1) {
if(s_logger.isTraceEnabled()) {
s_logger.trace("POJO " + o.getClass().getName() + " does not have " + methodName + "() method for property: " + propName + ", will check is-prefixed method to see if it is boolean property");
}
}
if(method != null) {
return method;
}
methodName = getGetMethodName("is", propName);
try {
method = o.getClass().getMethod(methodName);
} catch (SecurityException e1) {
s_logger.error("Security exception in getting POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (NoSuchMethodException e1) {
s_logger.warn("POJO " + o.getClass().getName() + " does not have " + methodName + "() method for property: " + propName);
}
return method;
}
private static String getGetMethodName(String prefix, String fieldName) {
StringBuffer sb = new StringBuffer(prefix);
if(fieldName.length() >= prefix.length() && fieldName.substring(0, prefix.length()).equals(prefix)) {
return fieldName;
} else {
sb.append(fieldName.substring(0, 1).toUpperCase());
sb.append(fieldName.substring(1));
}
return sb.toString();
}
}

View File

@@ -24,7 +24,7 @@ import javax.persistence.Enumerated;
import javax.persistence.PrimaryKeyJoinColumn;
import javax.persistence.Table;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.Type;

View File

@@ -18,6 +18,7 @@ package org.apache.cloudstack.framework.jobs;
import java.util.Date;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueItem;
import org.apache.cloudstack.jobs.Job;
public interface AsyncJob extends Job {

View File

@@ -19,6 +19,7 @@ package org.apache.cloudstack.framework.jobs;
import java.util.List;
import org.apache.cloudstack.api.command.user.job.QueryAsyncJobResultCmd;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import com.cloud.utils.Predicate;
import com.cloud.utils.component.Manager;

View File

@@ -19,7 +19,7 @@ package org.apache.cloudstack.framework.jobs.dao;
import java.util.Date;
import java.util.List;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import com.cloud.utils.db.GenericDao;

View File

@@ -24,7 +24,7 @@ import java.util.List;
import org.apache.log4j.Logger;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;

View File

@@ -18,7 +18,7 @@ package org.apache.cloudstack.framework.jobs.dao;
import java.util.List;
import org.apache.cloudstack.framework.jobs.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJoinMapVO;
import com.cloud.utils.db.GenericDao;

View File

@@ -27,7 +27,7 @@ import java.util.TimeZone;
import org.apache.log4j.Logger;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJoinMapVO;
import com.cloud.utils.DateUtil;
import com.cloud.utils.db.GenericDaoBase;

View File

@@ -18,7 +18,7 @@ package org.apache.cloudstack.framework.jobs.dao;
import java.util.List;
import org.apache.cloudstack.framework.jobs.AsyncJobJournalVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJournalVO;
import com.cloud.utils.db.GenericDao;

View File

@@ -18,7 +18,7 @@ package org.apache.cloudstack.framework.jobs.dao;
import java.util.List;
import org.apache.cloudstack.framework.jobs.AsyncJobJournalVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJournalVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;

View File

@@ -16,7 +16,7 @@
// under the License.
package org.apache.cloudstack.framework.jobs.dao;
import org.apache.cloudstack.framework.jobs.SyncQueueVO;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueVO;
import com.cloud.utils.db.GenericDao;

View File

@@ -24,7 +24,7 @@ import java.util.TimeZone;
import org.apache.log4j.Logger;
import org.apache.cloudstack.framework.jobs.SyncQueueVO;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueVO;
import com.cloud.utils.DateUtil;
import com.cloud.utils.db.GenericDaoBase;

View File

@@ -18,7 +18,7 @@ package org.apache.cloudstack.framework.jobs.dao;
import java.util.List;
import org.apache.cloudstack.framework.jobs.SyncQueueItemVO;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueItemVO;
import com.cloud.utils.db.GenericDao;

View File

@@ -27,7 +27,7 @@ import java.util.TimeZone;
import org.apache.log4j.Logger;
import org.apache.cloudstack.framework.jobs.SyncQueueItemVO;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueItemVO;
import com.cloud.utils.DateUtil;
import com.cloud.utils.db.DB;

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.Date;

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.Date;
@@ -27,6 +27,9 @@ import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJob.JournalType;
import com.cloud.utils.DateUtil;
import com.cloud.utils.db.GenericDao;

View File

@@ -14,13 +14,17 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.Date;
import java.util.TimeZone;
import javax.management.StandardMBean;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJobMBean;
import com.cloud.utils.DateUtil;
public class AsyncJobMBeanImpl extends StandardMBean implements AsyncJobMBean {

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.HashMap;
import java.util.Map;
@@ -26,6 +26,9 @@ import javax.naming.ConfigurationException;
import org.apache.log4j.Logger;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJob.Topics;
import org.apache.cloudstack.framework.messagebus.MessageBus;
import org.apache.cloudstack.framework.messagebus.MessageDispatcher;
import org.apache.cloudstack.framework.messagebus.MessageHandler;

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.Date;
import java.util.UUID;
@@ -33,6 +33,7 @@ import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.jobs.Job;
import com.cloud.utils.UuidUtils;

View File

@@ -0,0 +1,127 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs.impl;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.cloud.utils.exception.CloudRuntimeException;
/**
* Note: toPairList and appendPairList only support simple POJO objects currently
*/
public class JobSerializerHelper {
private static final Logger s_logger = Logger.getLogger(JobSerializerHelper.class);
public static String token = "/";
private static Gson s_gson;
static {
GsonBuilder gsonBuilder = new GsonBuilder();
gsonBuilder.setVersion(1.5);
s_logger.debug("Job GSON Builder initialized.");
s_gson = gsonBuilder.create();
}
public static String toSerializedString(Object result) {
if(result != null) {
Class<?> clz = result.getClass();
return clz.getName() + token + s_gson.toJson(result);
}
return null;
}
public static Object fromSerializedString(String result) {
try {
if(result != null && !result.isEmpty()) {
String[] serializedParts = result.split(token);
if (serializedParts.length < 2) {
return null;
}
String clzName = serializedParts[0];
String nameField = null;
String content = null;
if (serializedParts.length == 2) {
content = serializedParts[1];
} else {
nameField = serializedParts[1];
int index = result.indexOf(token + nameField + token);
content = result.substring(index + nameField.length() + 2);
}
Class<?> clz;
try {
clz = Class.forName(clzName);
} catch (ClassNotFoundException e) {
return null;
}
Object obj = s_gson.fromJson(content, clz);
return obj;
}
return null;
} catch(RuntimeException e) {
throw new CloudRuntimeException("Unable to deserialize: " + result, e);
}
}
public static String toObjectSerializedString(Serializable object) {
assert(object != null);
ByteArrayOutputStream bs = new ByteArrayOutputStream();
try {
ObjectOutputStream os = new ObjectOutputStream(bs);
os.writeObject(object);
os.close();
bs.close();
return Base64.encodeBase64URLSafeString(bs.toByteArray());
} catch(IOException e) {
throw new CloudRuntimeException("Unable to serialize: " + object, e);
}
}
public static Object fromObjectSerializedString(String base64EncodedString) {
if(base64EncodedString == null)
return null;
byte[] content = Base64.decodeBase64(base64EncodedString);
ByteArrayInputStream bs = new ByteArrayInputStream(content);
try {
ObjectInputStream is = new ObjectInputStream(bs);
Object obj = is.readObject();
is.close();
bs.close();
return obj;
} catch(IOException e) {
throw new CloudRuntimeException("Unable to serialize: " + base64EncodedString, e);
} catch (ClassNotFoundException e) {
throw new CloudRuntimeException("Unable to serialize: " + base64EncodedString, e);
}
}
}
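
For context on the new helper above: JobSerializerHelper keeps the wire format of the deleted com.cloud.serializer.SerializerHelper, encoding job results as <className>/<json> and whole Java objects (typically exceptions passed between joined jobs, as in the AsyncJobExecutionContext and VmWorkTest hunks below) as URL-safe Base64. A minimal round-trip sketch, assuming only the class shown in this diff; the example class and values are illustrative:

import org.apache.cloudstack.framework.jobs.impl.JobSerializerHelper;

public class JobSerializerHelperExample {
    public static void main(String[] args) {
        // JSON path: the payload is prefixed with its class name and the "/" token.
        String wire = JobSerializerHelper.toSerializedString(Long.valueOf(42));
        System.out.println(wire);                                            // java.lang.Long/42
        System.out.println(JobSerializerHelper.fromSerializedString(wire));  // 42

        // Java-serialization path: Base64-encoded ObjectOutputStream content.
        String encoded = JobSerializerHelper.toObjectSerializedString(new RuntimeException("boom"));
        Object restored = JobSerializerHelper.fromObjectSerializedString(encoded);
        System.out.println(restored instanceof RuntimeException);            // true
    }
}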

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
public interface SyncQueueItem {
public final String AsyncJobContentType = "AsyncJob";

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import org.apache.cloudstack.api.InternalIdentity;

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.List;

View File

@@ -14,7 +14,7 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import java.util.ArrayList;
import java.util.Date;

View File

@@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs;
package org.apache.cloudstack.framework.jobs.impl;
import org.apache.cloudstack.api.InternalIdentity;

View File

@@ -16,12 +16,27 @@
// under the License.
package com.cloud.api;
import com.google.gson.GsonBuilder;
import org.apache.cloudstack.api.ResponseObject;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import com.google.gson.GsonBuilder;
import org.apache.cloudstack.api.ResponseObject;
import com.cloud.serializer.Param;
import com.cloud.utils.DateUtil;
import com.cloud.utils.Pair;
public class ApiGsonHelper {
private static final Logger s_logger = Logger.getLogger(ApiGsonHelper.class);
private static final GsonBuilder s_gBuilder;
static {
s_gBuilder = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
@@ -33,4 +48,107 @@ public class ApiGsonHelper {
public static GsonBuilder getBuilder() {
return s_gBuilder;
}
public static List<Pair<String, Object>> toPairList(Object o, String name) {
List<Pair<String, Object>> l = new ArrayList<Pair<String, Object>>();
return appendPairList(l, o, name);
}
public static List<Pair<String, Object>> appendPairList(List<Pair<String, Object>> l, Object o, String name) {
if (o != null) {
Class<?> clz = o.getClass();
if (clz.isPrimitive() || clz.getSuperclass() == Number.class || clz == String.class || clz == Date.class) {
l.add(new Pair<String, Object>(name, o.toString()));
return l;
}
for (Field f : clz.getDeclaredFields()) {
if ((f.getModifiers() & Modifier.STATIC) != 0) {
continue;
}
Param param = f.getAnnotation(Param.class);
if (param == null) {
continue;
}
String propName = f.getName();
if (!param.propName().isEmpty()) {
propName = param.propName();
}
String paramName = param.name();
if (paramName.isEmpty()) {
paramName = propName;
}
Method method = getGetMethod(o, propName);
if (method != null) {
try {
Object fieldValue = method.invoke(o);
if (fieldValue != null) {
if (f.getType() == Date.class) {
l.add(new Pair<String, Object>(paramName, DateUtil.getOutputString((Date)fieldValue)));
} else {
l.add(new Pair<String, Object>(paramName, fieldValue.toString()));
}
}
//else
// l.add(new Pair<String, Object>(paramName, ""));
} catch (IllegalArgumentException e) {
s_logger.error("Illegal argument exception when calling POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (IllegalAccessException e) {
s_logger.error("Illegal access exception when calling POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (InvocationTargetException e) {
s_logger.error("Invocation target exception when calling POJO " + o.getClass().getName() + " get method for property: " + propName);
}
}
}
}
return l;
}
private static Method getGetMethod(Object o, String propName) {
Method method = null;
String methodName = getGetMethodName("get", propName);
try {
method = o.getClass().getMethod(methodName);
} catch (SecurityException e1) {
s_logger.error("Security exception in getting POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (NoSuchMethodException e1) {
if (s_logger.isTraceEnabled()) {
s_logger.trace("POJO " + o.getClass().getName() + " does not have " + methodName + "() method for property: " + propName
+ ", will check is-prefixed method to see if it is boolean property");
}
}
if (method != null) {
return method;
}
methodName = getGetMethodName("is", propName);
try {
method = o.getClass().getMethod(methodName);
} catch (SecurityException e1) {
s_logger.error("Security exception in getting POJO " + o.getClass().getName() + " get method for property: " + propName);
} catch (NoSuchMethodException e1) {
s_logger.warn("POJO " + o.getClass().getName() + " does not have " + methodName + "() method for property: " + propName);
}
return method;
}
private static String getGetMethodName(String prefix, String fieldName) {
StringBuffer sb = new StringBuffer(prefix);
if (fieldName.length() >= prefix.length() && fieldName.substring(0, prefix.length()).equals(prefix)) {
return fieldName;
} else {
sb.append(fieldName.substring(0, 1).toUpperCase());
sb.append(fieldName.substring(1));
}
return sb.toString();
}
}
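
The methods added to ApiGsonHelper above mirror the pair-list reflection that previously lived in the deleted SerializerHelper: appendPairList walks @Param-annotated fields and resolves each value through a get- or is-prefixed accessor. A rough sketch of the resulting behaviour; the JobStatus POJO and its annotation values are hypothetical, and Pair's first()/second() accessors are assumed from com.cloud.utils.Pair:

import java.util.List;

import com.cloud.api.ApiGsonHelper;
import com.cloud.serializer.Param;
import com.cloud.utils.Pair;

public class ApiGsonHelperExample {
    // Hypothetical response object; only fields carrying @Param are reflected.
    public static class JobStatus {
        @Param(name = "jobid")
        private Long id = 7L;

        @Param(name = "jobresult")
        private String result = "SUCCEEDED";

        public Long getId() { return id; }
        public String getResult() { return result; }
    }

    public static void main(String[] args) {
        // One (name, value) pair per annotated field that has a usable getter.
        List<Pair<String, Object>> pairs = ApiGsonHelper.toPairList(new JobStatus(), "jobstatus");
        for (Pair<String, Object> p : pairs) {
            System.out.println(p.first() + " = " + p.second()); // jobid = 7, jobresult = SUCCEEDED
        }
    }
}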

View File

@@ -116,7 +116,7 @@ import org.apache.cloudstack.api.response.ListResponse;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJobManager;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import org.apache.cloudstack.region.RegionManager;
import com.cloud.api.response.ApiResponseSerializer;

View File

@@ -20,15 +20,15 @@ import javax.inject.Inject;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.AsyncJobManager;
import org.apache.cloudstack.framework.jobs.SyncQueueItem;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobJoinMapDao;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.impl.JobSerializerHelper;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueItem;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.serializer.SerializerHelper;
import com.cloud.utils.component.ComponentContext;
public class AsyncJobExecutionContext {
@@ -109,7 +109,7 @@ public class AsyncJobExecutionContext {
AsyncJobJoinMapVO record = _joinMapDao.getJoinRecord(_job.getId(), joinedJobId);
if(record.getJoinStatus() == AsyncJobConstants.STATUS_FAILED && record.getJoinResult() != null) {
Object exception = SerializerHelper.fromObjectSerializedString(record.getJoinResult());
Object exception = JobSerializerHelper.fromObjectSerializedString(record.getJoinResult());
if(exception != null && exception instanceof Exception) {
if(exception instanceof InsufficientCapacityException)
throw (InsufficientCapacityException)exception;

View File

@@ -43,19 +43,19 @@ import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJobDispatcher;
import org.apache.cloudstack.framework.jobs.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.AsyncJobJournalVO;
import org.apache.cloudstack.framework.jobs.AsyncJobMBeanImpl;
import org.apache.cloudstack.framework.jobs.AsyncJobManager;
import org.apache.cloudstack.framework.jobs.AsyncJobMonitor;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.SyncQueueItem;
import org.apache.cloudstack.framework.jobs.SyncQueueItemVO;
import org.apache.cloudstack.framework.jobs.SyncQueueManager;
import org.apache.cloudstack.framework.jobs.SyncQueueVO;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobDao;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobJoinMapDao;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobJournalDao;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJournalVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobMBeanImpl;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobMonitor;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueItem;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueItemVO;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueManager;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueVO;
import org.apache.cloudstack.framework.messagebus.MessageBus;
import org.apache.cloudstack.framework.messagebus.MessageDetector;
import org.apache.cloudstack.framework.messagebus.PublishScope;

View File

@@ -35,8 +35,8 @@ import org.apache.cloudstack.api.command.user.snapshot.CreateSnapshotCmd;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.jobs.AsyncJobConstants;
import org.apache.cloudstack.framework.jobs.AsyncJobManager;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobDao;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import com.cloud.api.ApiDispatcher;
import com.cloud.api.ApiGsonHelper;

View File

@@ -68,7 +68,7 @@ import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope;
import org.apache.cloudstack.framework.async.AsyncCallFuture;
import org.apache.cloudstack.framework.jobs.AsyncJobManager;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
import org.apache.log4j.Logger;

View File

@@ -29,8 +29,8 @@ import org.apache.log4j.Logger;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.jobs.AsyncJob;
import org.apache.cloudstack.framework.jobs.AsyncJobDispatcher;
import org.apache.cloudstack.framework.jobs.AsyncJobJoinMapVO;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobJoinMapDao;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobJoinMapVO;
import org.apache.cloudstack.vm.jobs.VmWorkJobDao;
import org.apache.cloudstack.vm.jobs.VmWorkJobVO;

View File

@@ -41,7 +41,6 @@ import com.cloud.deploy.DeploymentPlan;
import com.cloud.deploy.DeploymentPlanner.ExcludeList;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.InsufficientStorageCapacityException;
import com.cloud.serializer.SerializerHelper;
import com.cloud.utils.LogUtils;
import com.cloud.utils.Predicate;
import com.cloud.utils.component.ComponentContext;
@@ -49,7 +48,8 @@ import com.cloud.utils.db.Transaction;
import com.google.gson.Gson;
import org.apache.cloudstack.framework.jobs.AsyncJobManager;
import org.apache.cloudstack.framework.jobs.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobVO;
import org.apache.cloudstack.framework.jobs.impl.JobSerializerHelper;
import org.apache.cloudstack.vm.jobs.VmWorkJobDao;
import org.apache.cloudstack.vm.jobs.VmWorkJobVO;
import org.apache.cloudstack.vm.jobs.VmWorkJobVO.Step;
@@ -167,10 +167,10 @@ public class VmWorkTest extends TestCase {
public void testExceptionSerialization() {
InsufficientCapacityException exception = new InsufficientStorageCapacityException("foo", VmWorkJobVO.class, 1L);
String encodedString = SerializerHelper.toObjectSerializedString(exception);
String encodedString = JobSerializerHelper.toObjectSerializedString(exception);
System.out.println(encodedString);
exception = (InsufficientCapacityException)SerializerHelper.fromObjectSerializedString(encodedString);
exception = (InsufficientCapacityException)JobSerializerHelper.fromObjectSerializedString(encodedString);
Assert.assertTrue(exception.getScope() == VmWorkJobVO.class);
Assert.assertTrue(exception.getMessage().equals("foo"));
}

View File

@@ -20,9 +20,6 @@ import org.mockito.Mockito;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.apache.cloudstack.framework.jobs.AsyncJobMonitor;
import org.apache.cloudstack.framework.jobs.SyncQueueManager;
import org.apache.cloudstack.framework.jobs.SyncQueueManagerImpl;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobDao;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobDaoImpl;
import org.apache.cloudstack.framework.jobs.dao.AsyncJobJoinMapDao;
@@ -33,6 +30,9 @@ import org.apache.cloudstack.framework.jobs.dao.SyncQueueDao;
import org.apache.cloudstack.framework.jobs.dao.SyncQueueDaoImpl;
import org.apache.cloudstack.framework.jobs.dao.SyncQueueItemDao;
import org.apache.cloudstack.framework.jobs.dao.SyncQueueItemDaoImpl;
import org.apache.cloudstack.framework.jobs.impl.AsyncJobMonitor;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueManager;
import org.apache.cloudstack.framework.jobs.impl.SyncQueueManagerImpl;
import org.apache.cloudstack.vm.jobs.VmWorkJobDao;
import org.apache.cloudstack.vm.jobs.VmWorkJobDaoImpl;

View File

@@ -63,6 +63,11 @@
<artifactId>commons-codec</artifactId>
<version>${cs.codec.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${cs.gson.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>